Case 1 - atmosphere.rev_c044f_2cf68.AtmosphereFilter.java
boolean resumeOnBroadcast
Left modified signature and body
Right modified signature
Unstructured reported conflict including signature and body
Safe reported conflict including signature and body
MergeMethods reported conflict on body
Note: as the signature was updated, one of the method bodies is now incompatible with it
KeepBothMethods kept both versions of method
void suspend
Left modified signature and body. Also extracted executeSuspend method.
Right modified body
Unstructured merged versions. Apparently, some code was lost
Safe kept two versions of the method
MergeMethods kept both versions of method
KeepBothMethods kept both versions of method
boolean outputJunk
Left modified signature and body
Right modified body
Unstructured correctly merged
Safe kept both versions of method
MergeMethods correctly merged
KeepBothMethods kept both versions of method
Base
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
 * set the {@link AtmosphereResourceEvent} field based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute keys used to hand state between this filter and the Atmosphere runtime.
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
// NOTE(review): the two keys below lack the "." separator present in the keys above — presumably
// intentional for backward compatibility, but worth confirming.
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The behavior a Filter instance applies to the response, derived from the resource-method annotation
// (@Suspend, @Resume, @Broadcast, @Schedule, @Subscribe, @Publish) or from a SuspendResponse return type.
enum Action {
SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH
}
// Injected by Jersey per-request (@Context): the current servlet request and URI information.
private
@Context
HttpServletRequest servletReq;
private
@Context
UriInfo uriInfo;
// Maps a generated UUID (appended to the Location header) to its suspended resource so that a later
// @Resume request can find it when session support is disabled.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
/**
 * Per-resource-method response filter that executes one {@link Action} (suspend, resume,
 * broadcast, schedule, subscribe or publish) after the Jersey resource method has run.
 */
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
// Telescoping constructors: each one delegates down the chain, supplying defaults
// (timeout=-1, waitFor=0, scope=APPLICATION, outputComments=true, filters=null, topic=null).
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This filter only post-processes responses; no request-side filtering is performed.
public ContainerRequestFilter getRequestFilter() {
return null;
}
public ContainerResponseFilter getResponseFilter() {
return this;
}
/**
 * Decide whether the connection must resume on the first broadcast: long-polling
 * transports always resume; otherwise the annotation-provided value is used.
 *
 * @param request           the current {@link ContainerRequest}
 * @param resumeOnBroadcast the value configured via annotation
 * @return true when the X-Atmosphere-Transport header is long-polling, else {@code resumeOnBroadcast}
 */
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
if (transport != null && transport.equals(LONG_POLLING_TRANSPORT)) {
return true;
}
return resumeOnBroadcast;
}
/**
 * Decide whether padding ("junk") should be written to the suspended response.
 * Padding is suppressed for WebSocket upgrades (detected via the Connection header)
 * and for long-polling transports; otherwise the annotation-provided value is used.
 *
 * @param request    the current {@link ContainerRequest}
 * @param outputJunk the value configured via annotation
 * @return false for websocket/long-polling connections, else {@code outputJunk}
 */
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
boolean webSocketEnabled = false;
// NOTE(review): only the first "Connection" header value is inspected; multiple headers
// (as opposed to one comma-separated value) would be missed — confirm this is acceptable.
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
webSocketEnabled = true;
break;
}
}
}
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
if (webSocketEnabled) {
return false;
} else if (transport != null && transport.equals(LONG_POLLING_TRANSPORT)) {
return false;
}
return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 *
 * @param request the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(ContainerRequest request, ContainerResponse response) {
// A mapped exception short-circuits all Atmosphere processing.
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
switch (action) {
// The resource method returned a SuspendResponse: read suspend configuration from it.
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(request, s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(request, s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(sessionSupported, resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// Annotation-driven suspend variants: configure listeners and broadcaster, then suspend.
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(request, outputComments);
resumeOnBroadcast = resumeOnBroadcast(request, (action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
// NOTE(review): wrapping only e.getMessage() drops the original stack trace — consider
// new IllegalStateException(e).
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(sessionSupported, resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// @Resume: write the entity (if any), locate the previously suspended resource and resume it.
case RESUME:
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
// Session-less mode: the last path segment is the UUID issued at suspend time.
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
// @Broadcast / @Publish / resume-on-broadcast: push the entity through the Broadcaster.
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
// NOTE(review): drops the cause, same pattern as the @Subscribe branch above.
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
// @Schedule: write the entity now and schedule periodic broadcasts of the message.
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Register the response entity with the {@link TrackableSession} before suspending,
 * assigning a tracking ID (from the X-Atmosphere-TrackingID header, the resource itself,
 * or a fresh UUID) and echoing it back via response header and request attribute.
 *
 * @return the (possibly freshly created) TrackableResource
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
// NOTE(review): TrackableResource.class.cast(null) yields null, so the null check below only
// fires when the entity itself is null; a non-TrackableResource entity would throw here.
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Unwrap: the real entity to serialize is inside the TrackableResource.
response.setEntity(trackableResource.entity());
}
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * After suspending, attach the actual tracked object: the AtmosphereResource itself when the
 * declared type is an AtmosphereResource, otherwise its Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type()) ? true : false;
trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Populate standard headers on the suspended response: a websocket error header when an
 * upgrade was requested but is unsupported, no-cache headers, and CORS headers, each
 * gated by the corresponding init-param attributes.
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
// NOTE(review): unlike outputJunk(), tokens are not trim()ed here, so " Upgrade" after a
// comma would not match — confirm whether that is intended.
if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
if (!webSocketSupported) {
b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
}
}
}
}
boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
if (injectCacheHeaders) {
// Set to expire far in the past.
b = b.header(EXPIRES, "-1");
// Set standard HTTP/1.1 no-cache headers.
b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
// Set standard HTTP/1.0 no-cache header.
b = b.header(PRAGMA, "no-cache");
}
if (enableAccessControl) {
b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
return b;
}
/**
 * Mark every resource attached to the Broadcaster so it resumes on the next broadcast.
 */
void configureResumeOnBroadcast(Broadcaster b) {
Iterator<AtmosphereResource<?, ?>> i = b.getAtmosphereResources().iterator();
while (i.hasNext()) {
HttpServletRequest r = (HttpServletRequest) i.next().getRequest();
r.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
}
}
/**
 * Install the cluster filters and the annotation-declared {@link BroadcastFilter}s on the
 * Broadcaster's config, unless filters were already installed.
 *
 * @throws WebApplicationException when {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
/**
 * Here we can't predict if it's the same set of filter shared across all Broadcaster as
 * Broadcaster can have their own BroadcasterConfig instance.
 */
BroadcasterConfig c = bc.getBroadcasterConfig();
// Already configured
if (c.hasFilters()) {
return;
}
// Always the first one, before any transformation/filtering
for (ClusterBroadcastFilter cbf : clusters) {
cbf.setBroadcaster(bc);
c.addFilter(cbf);
}
BroadcastFilter f = null;
if (filters != null) {
for (Class<BroadcastFilter> filter : filters) {
try {
f = filter.newInstance();
InjectorProvider.getInjector().inject(f);
} catch (Throwable t) {
logger.warn("Invalid @BroadcastFilter: " + filter, t);
}
c.addFilter(f);
}
}
}
// Stores the AtmosphereResourceEventListener classes declared on @Suspend/@Subscribe.
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcast the response entity. A {@link Broadcastable} entity may supply its own
 * Broadcaster, the message to push and the message to return to the caller.
 *
 * @param delay -1 = broadcast immediately and wait for the result,
 *              0 = delay until the next broadcast, &gt;0 = delay that many seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
Object o = r.getEntity();
Broadcaster b = ar.getBroadcaster();
Object msg = o;
String returnMsg = null;
// Something went wrong if null.
if (o instanceof Broadcastable) {
if (((Broadcastable) o).getBroadcaster() != null) {
b = ((Broadcastable) o).getBroadcaster();
}
msg = ((Broadcastable) o).getMessage();
returnMsg = ((Broadcastable) o).getResponseMessage().toString();
}
if (action == Action.RESUME_ON_BROADCAST) {
configureResumeOnBroadcast(b);
}
if (o != null) {
addFilter(b);
try {
r.setEntity(msg);
if (msg == null) return;
if (delay == -1) {
Future<Object> f = b.broadcast(msg);
if (f == null) return;
// Block until the broadcast completes before answering the publisher.
Object t = f.get();
if (o instanceof Broadcastable) {
r.setEntity(returnMsg);
}
} else if (delay == 0) {
b.delayBroadcast(msg);
} else {
b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
}
} catch (InterruptedException ex) {
// NOTE(review): interrupt status is not restored (Thread.currentThread().interrupt()).
logger.error("broadcast interrupted", ex);
} catch (ExecutionException ex) {
logger.error("execution exception during broadcast", ex);
}
}
}
// Thin alias kept for readability at call sites inside broadcast().
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resume the suspended connection.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Collect a cluster filter to be installed first by configureFilter().
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Suspend the current connection: pick/configure the Broadcaster, publish the resource via
 * session and request attributes, negotiate the content type, optionally write padding and
 * the entity (with a Location header for session-less resume), then suspend.
 *
 * @param sessionSupported  whether HTTP-session support is enabled
 * @param resumeOnBroadcast resume on first broadcast instead of staying suspended
 * @param comments          whether to write streaming padding before suspending
 * @param timeout           suspend timeout in milliseconds (-1 = no timeout)
 * @param localScope        Suspend scope; REQUEST gets a per-request Broadcaster
 */
void suspend(boolean sessionSupported,
boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
URI location = null;
// Do not add location header if already there.
if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
// Reuse the Broadcaster of a previously suspended resource in this session and detach it.
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
// NOTE(review): the outer condition already requires bc == null, so the inner "else" branch
// below is unreachable dead code — likely a merge artifact; confirm and simplify.
if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
if (bc == null) {
try {
String id = UUID.randomUUID().toString();
// Re-generate a new one with proper scope.
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
bc = broadcasterFactory.get(c, id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
} else {
bc.setScope(Broadcaster.SCOPE.REQUEST);
}
}
configureFilter(bc);
r.setBroadcaster(bc);
if (sessionSupported) {
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
if (resumeOnBroadcast) {
// NOTE(review): new Boolean(true) allocates needlessly — Boolean.TRUE would do.
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
response.getEntity().getClass(),
response.getEntityType(),
response.getAnnotations(),
l);
if (contentType == null ||
contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
b = b.header("Content-Type", contentType != null ?
contentType.toString() : "text/html; charset=ISO-8859-1");
}
if (comments && !resumeOnBroadcast) {
// Write streaming padding so proxies/browsers start flushing the chunked response.
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null) {
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
// Entity already flushed; clear it so Jersey does not write it again, then suspend.
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
}
/**
 * Create a {@link ResourceFilter} which contains the information about the
 * annotation being processed.
 * <p/>
 * XXX Need to filter invalid mix of annotation.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke.
 */
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
if (am.getMethod() == null) {
return null;
}
// A SuspendResponse return type wins over any annotation: only the SUSPEND_RESPONSE filter runs.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
// NOTE(review): "suspendTimeout" here actually holds the @Broadcast filter classes — the
// variable is reused for several unrelated annotation values below.
Class[] suspendTimeout = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
}
list.addLast(f);
if (am.isAnnotationPresent(Cluster.class)) {
suspendTimeout = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : suspendTimeout) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
// NOTE(review): when trackable && resumeOnBroadcast, SUSPEND_TRACKABLE is chosen and the
// resume-on-broadcast intent is only recovered later via the transport check — confirm.
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
if (am.isAnnotationPresent(Resume.class)) {
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Convert a period expressed in {@code tu} to milliseconds; -1 (no timeout) passes through.
 */
private long translateTimeUnit(long period, TimeUnit tu) {
if (period == -1) return period;
switch (tu) {
case SECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.SECONDS);
case MINUTES:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.MINUTES);
case HOURS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.HOURS);
case DAYS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.DAYS);
case MILLISECONDS:
return period;
case MICROSECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.MICROSECONDS);
case NANOSECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.NANOSECONDS);
}
return period;
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} filed based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
// Shared logger for the factory and the Filter instances it creates.
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute key: the currently suspended AtmosphereResource.
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
// Request attribute key: UUID generated for session-less resume URLs.
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
// Request attribute key: the map of resume candidates (see resumeCandidates below).
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
// Request attribute key: Broadcaster injected by the framework.
// NOTE(review): unlike the keys above this one has no '.' separator — TODO confirm intentional.
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
// Request attribute key: Trackable injected by the framework.
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The annotation-derived behavior a Filter instance applies in filter().
enum Action {
SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH
}
// Injected by Jersey: the raw servlet request of the current invocation.
private
@Context
HttpServletRequest servletReq;
// Injected by Jersey: URI information used to build resume locations.
private
@Context
UriInfo uriInfo;
// Maps resume UUIDs to suspended resources when session support is disabled.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
// Behavior selected from the resource method's annotation.
private final Action action;
// Suspend timeout in milliseconds, or -1 for "suspend until resumed".
private final long timeout;
// Initial delay (seconds) before a scheduled broadcast starts.
private final int waitFor;
// Lifetime of the Broadcaster (APPLICATION, SESSION or REQUEST scope).
private final Suspend.SCOPE scope;
// BroadcastFilter classes to install on the Broadcaster; may be null.
private final Class<BroadcastFilter>[] filters;
// AtmosphereResourceEventListener classes attached on suspend; may be null.
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
// Whether padding/comments are written before suspending (streaming transports).
private final boolean outputComments;
// Cluster-wide broadcast filters registered via @Cluster.
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
// Topic used to look up the Broadcaster for @Subscribe/@Publish.
private final String topic;
// Convenience constructors chain down to the canonical one, filling defaults:
// timeout -1 (no timeout), waitFor 0, APPLICATION scope, comments on, no filters/topic.
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Canonical constructor: captures every configuration value for this filter.
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This filter performs no request-side processing.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// All work happens on the response side, in filter(...) below.
public ContainerResponseFilter getResponseFilter() {
return this;
}
/**
 * Decides whether the connection must be resumed after the first broadcast.
 * Long-polling clients are always resumed once a broadcast occurs, regardless
 * of the annotation-supplied default.
 *
 * @param request           the current {@link ContainerRequest}
 * @param resumeOnBroadcast the default taken from the annotation
 * @return {@code true} when the transport is long-polling, otherwise the supplied default
 */
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
    final String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    boolean longPolling = transport != null && transport.equals(LONG_POLLING_TRANSPORT);
    return longPolling || resumeOnBroadcast;
}
/**
 * Determines whether streaming padding ("junk") must be written before
 * suspending. WebSocket and long-polling connections never receive padding;
 * for any other transport the annotation-supplied default is returned.
 *
 * @param request    the current {@link ContainerRequest}
 * @param outputJunk the default taken from the annotation
 * @return {@code false} for WebSocket or long-polling transports, otherwise the default
 */
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
    boolean webSocketEnabled = false;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        // "Connection: keep-alive, Upgrade" — scan the comma-separated tokens.
        String header = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement();
        for (String token : header.split(",")) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && transport.equals(LONG_POLLING_TRANSPORT)) {
        return false;
    }
    return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 *
 * @param request the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(ContainerRequest request, ContainerResponse response) {
// Mapped exceptions are handled by Jersey; do not interfere.
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
switch (action) {
case SUSPEND_RESPONSE:
// The resource method returned a SuspendResponse: suspend using the
// values it carries rather than annotation attributes.
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(request, s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(request, s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
// Tracking is enabled by default when the framework supports it.
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(sessionSupported, resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
// Annotation-driven suspend: attach listeners, pick the Broadcaster
// (topic lookup for @Subscribe) and suspend the connection.
outputJunk = outputJunk(request, outputComments);
resumeOnBroadcast = resumeOnBroadcast(request, (action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(sessionSupported, resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case RESUME:
// Write any entity first, then resume the matching suspended resource:
// session attribute when sessions are on, otherwise the resume-candidate
// map keyed by the last URL segment (the UUID generated at suspend time).
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
// NOTE(review): r may still be null here if nothing was suspended and
// the request attribute is absent — TODO confirm upstream guarantees.
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
case SCHEDULE:
case SCHEDULE_RESUME:
// Periodic broadcast: write the entity now, then schedule the message.
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
// Here timeout carries the period and waitFor the initial delay, in seconds.
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Registers the response entity (or a fresh placeholder) with the
 * TrackableSession and propagates the tracking id via the response header
 * and a request attribute.
 *
 * @param request  the current {@link ContainerRequest}
 * @param response the current {@link ContainerResponse}
 * @return the registered {@link TrackableResource}, never null
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
// Class.cast(null) returns null, so a missing entity falls into the placeholder branch.
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Unwrap: the client should see the wrapped entity, not the tracker itself.
response.setEntity(trackableResource.entity());
}
// Tracking id precedence: request header, then the resource's own id, then a new UUID.
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * Attaches the concrete object to an already-registered {@link TrackableResource}:
 * the resource itself when the trackable was declared for an
 * {@link AtmosphereResource}, otherwise its {@link Broadcaster}.
 *
 * @param trackableResource the tracked wrapper created by {@code preTrack}
 * @param r                 the current {@link AtmosphereResource}
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // The comparison already yields a boolean; the former "? true : false" was redundant.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Adds the transport-related response headers: a WebSocket error header when an
 * Upgrade was requested but is unsupported, no-cache headers, and CORS headers,
 * all driven by request attributes set by the framework.
 *
 * @param b the builder to augment
 * @return the augmented builder
 * @throws IOException declared for callers; no I/O is performed here
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String upgrade : e) {
            // Trim each token: "Connection: keep-alive, Upgrade" splits into " Upgrade".
            // This mirrors the token handling in outputJunk(); without trim() the
            // error header was silently skipped for multi-token Connection headers.
            if (upgrade != null && upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                if (!webSocketSupported) {
                    b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
                }
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Marks every resource currently attached to the given {@link Broadcaster} so
 * that it is resumed once the next broadcast completes.
 *
 * @param b the {@link Broadcaster} whose resources are flagged
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest suspendedRequest = (HttpServletRequest) resource.getRequest();
        suspendedRequest.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the cluster filters and the annotation-declared
 * {@link BroadcastFilter}s on the given {@link Broadcaster}, unless its
 * {@link BroadcasterConfig} already carries filters.
 *
 * @param bc the Broadcaster to configure; must not be null
 * @throws WebApplicationException when {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Cluster filters always come first, before any transformation/filtering.
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Register inside the try: previously addFilter ran after the
                // catch, so a failed instantiation registered null (or the
                // filter from the previous loop iteration).
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Stores the listener classes to instantiate and attach when suspending.
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcasts the response entity using the resource's Broadcaster (or the one
 * supplied by a {@link Broadcastable} entity), synchronously when {@code delay}
 * is -1, otherwise as a delayed broadcast.
 *
 * @param r     the current {@link ContainerResponse} whose entity is broadcast
 * @param ar    the resource supplying the default {@link Broadcaster}
 * @param delay -1 for a synchronous broadcast, 0 for an unbounded delay,
 *              otherwise the delay in seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // A Broadcastable entity may carry its own Broadcaster and a distinct
    // broadcast-message/response-message pair; unwrap those here.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null) return;
            if (delay == -1) {
                // Synchronous broadcast: wait for completion before swapping in
                // the Broadcastable's response message for the caller.
                Future<Object> f = b.broadcast(msg);
                if (f == null) return;
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Installs the configured BroadcastFilters/cluster filters on the Broadcaster.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resumes a previously suspended resource.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Registers a cluster-wide broadcast filter built from @Cluster.
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Suspends the current response: selects and configures the Broadcaster, stores
 * the resource for a later resume (session attribute or resume-candidate map),
 * writes the entity and optional streaming padding, then suspends the connection.
 *
 * @param sessionSupported  whether session support is enabled in atmosphere.xml
 * @param resumeOnBroadcast whether the connection resumes after the next broadcast
 * @param comments          whether streaming padding is written before suspending
 * @param timeout           suspend timeout in milliseconds, -1 for no timeout
 * @param request           the current {@link ContainerRequest}
 * @param response          the current {@link ContainerResponse}
 * @param bc                the Broadcaster to use; may be null
 * @param r                 the resource to suspend
 * @param localScope        the Suspend scope (REQUEST gets a private Broadcaster)
 */
void suspend(boolean sessionSupported,
boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
URI location = null;
// Do not add location header if already there.
if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
// Without a session the client resumes via a generated URL; remember the mapping.
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
// Reuse the Broadcaster of the resource already suspended in this session.
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
// A Broadcastable entity supplies its own Broadcaster and response message.
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
// NOTE(review): the outer condition already requires bc == null, so the inner
// else branch below is unreachable dead code. The intent was probably
// (localScope == REQUEST) alone, so an existing Broadcaster also gets
// REQUEST scope — TODO confirm before changing behavior.
if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
if (bc == null) {
try {
String id = UUID.randomUUID().toString();
// Re-generate a new one with proper scope.
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
bc = broadcasterFactory.get(c, id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
} else {
bc.setScope(Broadcaster.SCOPE.REQUEST);
}
}
configureFilter(bc);
r.setBroadcaster(bc);
if (sessionSupported) {
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first acceptable media type of the request.
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
response.getEntity().getClass(),
response.getEntityType(),
response.getAnnotations(),
l);
if (contentType == null ||
contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
b = b.header("Content-Type", contentType != null ?
contentType.toString() : "text/html; charset=ISO-8859-1");
}
if (comments && !resumeOnBroadcast) {
// Streaming transports need padding so proxies/browsers flush the response.
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null) {
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
// Entity already written (or absent); clear it so Jersey doesn't write it again.
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
}
/**
 * Create a {@link ResourceFilter} which contains the information about the
 * annotation being processed. Inspects the annotations of {@code am} and builds
 * the matching chain of {@link Filter}s; a method may combine several
 * annotations (e.g. @Broadcast with @Cluster).
 * <p/>
 * XXX Need to filter invalid mix of annotation.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke, or null when none apply.
 */
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
if (am.getMethod() == null) {
return null;
}
// Returning SuspendResponse supersedes any annotation-based configuration.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] suspendTimeout = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
}
list.addLast(f);
// @Cluster piggybacks on the @Broadcast filter just created.
if (am.isAnnotationPresent(Cluster.class)) {
suspendTimeout = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : suspendTimeout) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
// NOTE(review): when the return type is trackable, both branches yield
// SUSPEND_TRACKABLE, so resumeOnBroadcast() is effectively ignored for
// trackable resources — TODO confirm this is intended.
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
if (am.isAnnotationPresent(Resume.class)) {
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Converts a suspend/schedule period expressed in {@code tu} to milliseconds.
 * The sentinel value {@code -1} (no timeout) is passed through untouched.
 *
 * @param period the period, or {@code -1} for "no timeout"
 * @param tu     the unit the period is expressed in
 * @return the period in milliseconds, or {@code -1}
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    // TimeUnit.convert handles every unit; the former per-unit switch
    // (SECONDS/MINUTES/.../NANOSECONDS) performed the same conversions case by case.
    return TimeUnit.MILLISECONDS.convert(period, tu);
}
}
Left
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} filed based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
enum Action {
SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH, ASYNCHRONOUS
}
private
@Context
HttpServletRequest servletReq;
private
@Context
UriInfo uriInfo;
private boolean useResumeAnnotation = false;
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
        // Telescoping constructors: each shorter form delegates downward with defaults
        // (timeout -1, waitFor 0, APPLICATION scope, comments on, no filters/topic).
        protected Filter(Action action) {
            this(action, -1);
        }

        protected Filter(Action action, long timeout) {
            this(action, timeout, 0);
        }

        protected Filter(Action action, long timeout, int waitFor) {
            this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
        }

        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
            this(action, timeout, waitFor, scope, true);
        }

        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
            this(action, timeout, waitFor, scope, outputComments, null, null);
        }

        // The terminal constructor: every field is assigned exactly once here.
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
            this.action = action;
            this.timeout = timeout;
            this.scope = scope;
            this.outputComments = outputComments;
            this.waitFor = waitFor;
            this.filters = filters;
            this.topic = topic;
        }
        // This filter only post-processes responses; no request-side filtering.
        public ContainerRequestFilter getRequestFilter() {
            return null;
        }

        // The Filter itself is the response filter (see filter(...) below).
        public ContainerResponseFilter getResponseFilter() {
            return this;
        }
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
boolean outputJunk(boolean outputJunk) {
boolean webSocketEnabled = false;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
webSocketEnabled = true;
break;
}
}
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
if (webSocketEnabled) {
return false;
} else if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return false;
}
return outputJunk;
}
        /**
         * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
         * based on the annotation the web application has used.
         *
         * @param request  the {@link ContainerRequest}
         * @param response the {@link ContainerResponse}
         * @return the {@link ContainerResponse}
         */
        public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
            // A mapped exception means the resource method failed; leave the response alone.
            if (response.getMappedThrowable() != null) {
                return response;
            }
            AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
                    (AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
                            .getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
            if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
                useResumeAnnotation = true;
            }
            switch (action) {
                case ASYNCHRONOUS:
                    // @Asynchronous: broadcast the entity and, for non-polling transports, suspend.
                    // Force the status code to 200 events independently of the value of the entity (null or not)
                    if (response.getStatus() == 204) {
                        response.setStatus(200);
                    }
                    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
                    String broadcasterName = servletReq.getHeader(topic);
                    if (transport == null || broadcasterName == null) {
                        throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
                                + X_ATMOSPHERE_TRANSPORT
                                + " and uuid " + X_ATMOSPHERE_TRACKING_ID));
                    }
                    String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
                    final boolean waitForResource = waitFor == -1 ? true : false;
                    final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
                    if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
                        boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
                        final boolean resumeOnBroadcast = resumeOnBroadcast(false);
                        for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                            try {
                                AtmosphereResourceEventListener el = listener.newInstance();
                                InjectorProvider.getInjector().inject(el);
                                if (r instanceof AtmosphereEventLifecycle) {
                                    r.addEventListener(el);
                                }
                            } catch (Throwable t) {
                                throw new WebApplicationException(
                                        new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
                            }
                        }
                        final Object entity = response.getEntity();
                        // Defer the broadcast until the connection is actually suspended,
                        // then detach this one-shot listener in the finally block.
                        r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
                            @Override
                            public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
                                try {
                                    if (entity != null) {
                                        if (waitForResource) {
                                            bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
                                        } else {
                                            bcaster.broadcast(entity);
                                        }
                                    }
                                } finally {
                                    event.getResource().removeEventListener(this);
                                }
                            }
                        });
                        if (resumeOnBroadcast) {
                            servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
                        }
                        r.setBroadcaster(bcaster);
                        executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
                    } else {
                        // Polling transport or WebSocket sub-protocol: broadcast immediately, no suspend.
                        Object entity = response.getEntity();
                        if (waitForResource) {
                            bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
                        } else {
                            bcaster.broadcast(entity);
                        }
                        if (subProtocol == null) {
                            try {
                                if (Callable.class.isAssignableFrom(entity.getClass())) {
                                    entity = Callable.class.cast(entity).call();
                                }
                                response.setEntity(entity);
                                response.write();
                            } catch (Throwable t) {
                                logger.debug("Error running Callable", t);
                                response.setEntity(null);
                            }
                        } else {
                            response.setEntity(null);
                        }
                    }
                    break;
                case SUSPEND_RESPONSE:
                    // Resource method returned a SuspendResponse: honor its embedded settings.
                    SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
                    boolean outputJunk = outputJunk(s.outputComments());
                    boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
                    for (AtmosphereResourceEventListener el : s.listeners()) {
                        if (r instanceof AtmosphereEventLifecycle) {
                            r.addEventListener(el);
                        }
                    }
                    Broadcaster bc = s.broadcaster();
                    if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
                        bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
                    }
                    boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
                    // Register our TrackableResource
                    boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
                    TrackableResource<? extends Trackable> trackableResource = null;
                    if (isTracked) {
                        trackableResource = preTrack(request, response);
                    }
                    suspend(resumeOnBroadcast, outputJunk,
                            translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
                    // Associate the tracked resource.
                    if (isTracked && trackableResource != null) {
                        postTrack(trackableResource, r);
                    }
                    break;
                case SUBSCRIBE_TRACKABLE:
                case SUBSCRIBE:
                case SUSPEND:
                case SUSPEND_TRACKABLE:
                case SUSPEND_RESUME:
                    // @Suspend / @Subscribe family: attach listeners, pick a broadcaster, suspend.
                    outputJunk = outputJunk(outputComments);
                    resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
                    for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                        try {
                            AtmosphereResourceEventListener el = listener.newInstance();
                            InjectorProvider.getInjector().inject(el);
                            if (r instanceof AtmosphereEventLifecycle) {
                                ((AtmosphereEventLifecycle) r).addEventListener(el);
                            }
                        } catch (Throwable t) {
                            throw new WebApplicationException(
                                    new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
                        }
                    }
                    Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
                    // @Subscribe
                    if (action == Action.SUBSCRIBE) {
                        Class<Broadcaster> c = null;
                        try {
                            c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                        } catch (Throwable e) {
                            throw new IllegalStateException(e.getMessage());
                        }
                        broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
                    }
                    // Tracking is enabled by default
                    supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
                    // Register our TrackableResource
                    isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
                    if (isTracked) {
                        trackableResource = preTrack(request, response);
                    } else {
                        trackableResource = null;
                    }
                    suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
                            broadcaster, r, scope);
                    // Associate the tracked resource.
                    if (isTracked && trackableResource != null) {
                        postTrack(trackableResource, r);
                    }
                    break;
                case RESUME:
                    // @Resume: flush the entity, locate the suspended peer and resume it.
                    if (response.getEntity() != null) {
                        try {
                            response.write();
                        } catch (IOException ex) {
                            throw new WebApplicationException(ex);
                        }
                    }
                    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
                    if (sessionSupported) {
                        r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
                    } else {
                        // No session: the last path segment is the resume uuid handed out by suspend().
                        String path = response.getContainerRequest().getPath();
                        r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
                    }
                    if (r != null) {
                        resume(r);
                    } else {
                        throw new WebApplicationException(
                                new IllegalStateException("Unable to retrieve suspended Response. " +
                                        "Either session-support is not enabled in atmosphere.xml or the" +
                                        "path used to resume is invalid."));
                    }
                    break;
                case BROADCAST:
                case PUBLISH:
                case RESUME_ON_BROADCAST:
                    // @Broadcast / @Publish: push the entity through the (possibly looked-up) broadcaster.
                    AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
                    if (ar != null) {
                        r = ar;
                    }
                    if (action == Action.PUBLISH) {
                        Class<Broadcaster> c = null;
                        try {
                            c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                        } catch (Throwable e) {
                            throw new IllegalStateException(e.getMessage());
                        }
                        r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
                    }
                    broadcast(response, r, timeout);
                    break;
                case SCHEDULE:
                case SCHEDULE_RESUME:
                    // @Schedule: write the entity now, then rebroadcast it at a fixed rate.
                    Object o = response.getEntity();
                    Broadcaster b = r.getBroadcaster();
                    if (response.getEntity() instanceof Broadcastable) {
                        b = ((Broadcastable) response.getEntity()).getBroadcaster();
                        o = ((Broadcastable) response.getEntity()).getMessage();
                        response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
                    }
                    if (response.getEntity() != null) {
                        try {
                            response.write();
                        } catch (IOException ex) {
                            throw new WebApplicationException(ex);
                        }
                    }
                    if (action == Action.SCHEDULE_RESUME) {
                        configureResumeOnBroadcast(b);
                    }
                    b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
                    break;
            }
            return response;
        }
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
response.setEntity(trackableResource.entity());
}
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type()) ? true : false;
trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
if (!webSocketSupported) {
b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
}
}
}
}
boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
if (injectCacheHeaders) {
// Set to expire far in the past.
b = b.header(EXPIRES, "-1");
// Set standard HTTP/1.1 no-cache headers.
b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
// Set standard HTTP/1.0 no-cache header.
b = b.header(PRAGMA, "no-cache");
}
if (enableAccessControl) {
b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
return b;
}
void configureResumeOnBroadcast(Broadcaster b) {
Iterator<AtmosphereResource<?, ?>> i = b.getAtmosphereResources().iterator();
while (i.hasNext()) {
HttpServletRequest r = (HttpServletRequest) i.next().getRequest();
r.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
}
}
void configureFilter(Broadcaster bc) {
if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
/**
* Here we can't predict if it's the same set of filter shared across all Broadcaster as
* Broadcaster can have their own BroadcasterConfig instance.
*/
BroadcasterConfig c = bc.getBroadcasterConfig();
// Already configured
if (c.hasFilters()) {
return;
}
// Always the first one, before any transformation/filtering
for (ClusterBroadcastFilter cbf : clusters) {
cbf.setBroadcaster(bc);
c.addFilter(cbf);
}
BroadcastFilter f = null;
if (filters != null) {
for (Class<BroadcastFilter> filter : filters) {
try {
f = filter.newInstance();
InjectorProvider.getInjector().inject(f);
} catch (Throwable t) {
logger.warn("Invalid @BroadcastFilter: " + filter, t);
}
c.addFilter(f);
}
}
}
        // Stores the @...(listeners = ...) classes; instantiated lazily in filter().
        private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
            this.listeners = listeners;
        }
        /**
         * Broadcasts the response entity through the resource's broadcaster.
         * A {@link Broadcastable} entity may override the broadcaster, carries the
         * actual message, and supplies a separate message to return to the caller.
         *
         * @param r     the container response whose entity is broadcast
         * @param ar    the resource whose broadcaster is used by default
         * @param delay -1 = broadcast now and wait for delivery; 0 = delay indefinitely;
         *              &gt;0 = delay that many seconds
         */
        void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
            Object o = r.getEntity();
            Broadcaster b = ar.getBroadcaster();
            Object msg = o;
            String returnMsg = null;
            // Something went wrong if null.
            if (o instanceof Broadcastable) {
                if (((Broadcastable) o).getBroadcaster() != null) {
                    b = ((Broadcastable) o).getBroadcaster();
                }
                msg = ((Broadcastable) o).getMessage();
                returnMsg = ((Broadcastable) o).getResponseMessage().toString();
            }
            if (action == Action.RESUME_ON_BROADCAST) {
                configureResumeOnBroadcast(b);
            }
            if (o != null) {
                addFilter(b);
                try {
                    r.setEntity(msg);
                    if (msg == null) return;
                    if (delay == -1) {
                        Future<Object> f = b.broadcast(msg);
                        if (f == null) return;
                        // Block until the broadcast completed; the result itself is not used.
                        Object t = f.get();
                        if (o instanceof Broadcastable) {
                            r.setEntity(returnMsg);
                        }
                    } else if (delay == 0) {
                        b.delayBroadcast(msg);
                    } else {
                        b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
                    }
                } catch (InterruptedException ex) {
                    // NOTE(review): the interrupt flag is not re-set here
                    // (Thread.currentThread().interrupt()) — confirm whether callers rely on it.
                    logger.error("broadcast interrupted", ex);
                } catch (ExecutionException ex) {
                    logger.error("execution exception during broadcast", ex);
                }
            }
        }
        // Thin alias kept for readability at the call sites in broadcast().
        void addFilter(Broadcaster bc) {
            configureFilter(bc);
        }

        // Resumes a previously suspended connection.
        void resume(AtmosphereResource resource) {
            resource.resume();
        }

        // Collects @Cluster filters; installed first by configureFilter().
        void addCluster(ClusterBroadcastFilter f) {
            clusters.add(f);
        }
void suspend(boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
URI location = null;
// Do not add location header if already there.
if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
if (bc == null) {
try {
String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
if (id == null) {
id = UUID.randomUUID().toString();
}
bc = broadcasterFactory.get(id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
} else {
bc.setScope(Broadcaster.SCOPE.REQUEST);
}
}
r.setBroadcaster(bc);
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
        /**
         * Performs the actual suspension: links the resource and response to the
         * request (and session), resolves the Content-Type, optionally writes
         * streaming padding and flushes the entity, then suspends the connection.
         *
         * @param location    resume URI to expose via the Location header, may be null
         * @param flushEntity write the entity before suspending (false for @Asynchronous,
         *                    whose entity is broadcast from the onSuspend listener instead)
         */
        void executeSuspend(AtmosphereResource r,
                            long timeout,
                            boolean comments,
                            boolean resumeOnBroadcast,
                            URI location,
                            ContainerRequest request,
                            ContainerResponse response,
                            boolean flushEntity) {
            boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
            configureFilter(r.getBroadcaster());
            if (sessionSupported) {
                servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
                servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
            }
            servletReq.setAttribute(SUSPENDED_RESOURCE, r);
            servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
            logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
            // Set the content-type based on the returned entity.
            try {
                MediaType contentType = response.getMediaType();
                if (contentType == null && response.getEntity() != null) {
                    LinkedList<MediaType> l = new LinkedList<MediaType>();
                    // Will return the first acceptable media type.
                    l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
                    contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                            response.getEntity().getClass(),
                            response.getEntityType(),
                            response.getAnnotations(),
                            l);
                    // Fall back to octet-stream rather than exposing a wildcard type.
                    if (contentType == null ||
                            contentType.isWildcardType() || contentType.isWildcardSubtype())
                        contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
                }
                Object entity = response.getEntity();
                Response.ResponseBuilder b = Response.ok();
                b = configureHeaders(b);
                if (entity != null) {
                    b = b.header("Content-Type", contentType != null ?
                            contentType.toString() : "text/html; charset=ISO-8859-1");
                    servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
                }
                boolean eclipse362468 = false;
                String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
                if (serverInfo.indexOf("jetty") != -1) {
                    String[] jettyVersion = serverInfo.substring(6).split("\\.");
                    // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
                    eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                            || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
                    if (comments && eclipse362468) {
                        logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
                    }
                }
                // Streaming padding so proxies/browsers start delivering the chunked response.
                if (!eclipse362468 && comments && !resumeOnBroadcast) {
                    String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
                    String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
                    if (location != null) {
                        b = b.header(HttpHeaders.LOCATION, location);
                        location = null;
                    }
                    response.setResponse(b.entity(paddingData).build());
                    response.write();
                }
                if (entity != null && flushEntity) {
                    try {
                        if (Callable.class.isAssignableFrom(entity.getClass())) {
                            entity = Callable.class.cast(entity).call();
                        }
                    } catch (Throwable t) {
                        logger.error("Error executing callable {}", entity);
                        entity = null;
                    }
                    if (location != null) {
                        b = b.header(HttpHeaders.LOCATION, location);
                    }
                    response.setResponse(b.entity(entity).build());
                    response.write();
                }
                response.setEntity(null);
                r.suspend(timeout, false);
            } catch (IOException ex) {
                throw new WebApplicationException(ex);
            }
        }
    }
    /**
     * Create a {@link ResourceFilter} which contains the information about the
     * annotation being processed.
     * <p/>
     * XXX Need to filter invalid mix of annotation.
     *
     * @param am an {@link AbstractMethod}
     * @return a List of {@link ResourceFilter} to invoke, or null when no
     *         Atmosphere annotation applies (plain Jersey method).
     */
    public List<ResourceFilter> create(AbstractMethod am) {
        LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
        Filter f;
        if (logger.isDebugEnabled()) {
            for (Annotation annotation : am.getAnnotations()) {
                logger.debug("AtmosphereFilter processing annotation: {}", annotation);
            }
        }
        if (am.getMethod() == null) {
            return null;
        }
        // Returning a SuspendResponse short-circuits all annotation handling.
        if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
            list.addLast(new Filter(Action.SUSPEND_RESPONSE));
            return list;
        }
        if (am.isAnnotationPresent(Broadcast.class)) {
            int delay = am.getAnnotation(Broadcast.class).delay();
            Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
            if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
                f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
            } else {
                f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
            }
            list.addLast(f);
            // @Cluster filters piggy-back on the @Broadcast filter.
            if (am.isAnnotationPresent(Cluster.class)) {
                broadcastFilter = am.getAnnotation(Cluster.class).value();
                for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
                    try {
                        ClusterBroadcastFilter cbf = c.newInstance();
                        InjectorProvider.getInjector().inject(cbf);
                        cbf.setUri(am.getAnnotation(Cluster.class).name());
                        f.addCluster(cbf);
                    } catch (Throwable t) {
                        logger.warn("Invalid ClusterBroadcastFilter", t);
                    }
                }
            }
        }
        if (am.isAnnotationPresent(Asynchronous.class)) {
            int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
            Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
            boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
            // waitFor == -1 signals "wait for the resource" in the ASYNCHRONOUS action.
            f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
            f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Suspend.class)) {
            long suspendTimeout = am.getAnnotation(Suspend.class).period();
            TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
            suspendTimeout = translateTimeUnit(suspendTimeout, tu);
            Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
            boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
            boolean trackable = false;
            if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
                trackable = true;
            }
            if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
                f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
            } else {
                f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
            }
            f.setListeners(am.getAnnotation(Suspend.class).listeners());
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Subscribe.class)) {
            boolean trackable = false;
            if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
                trackable = true;
            }
            f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
                    false, null, am.getAnnotation(Subscribe.class).value());
            f.setListeners(am.getAnnotation(Subscribe.class).listeners());
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Publish.class)) {
            f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
                    false, null, am.getAnnotation(Publish.class).value());
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Resume.class)) {
            useResumeAnnotation = true;
            int suspendTimeout = am.getAnnotation(Resume.class).value();
            list.addFirst(new Filter(Action.RESUME, suspendTimeout));
        }
        if (am.isAnnotationPresent(Schedule.class)) {
            int period = am.getAnnotation(Schedule.class).period();
            int waitFor = am.getAnnotation(Schedule.class).waitFor();
            if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
                list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
            } else {
                list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
            }
        }
        // Nothing, normal Jersey application.
        return list.size() > 0 ? list : null;
    }
private long translateTimeUnit(long period, TimeUnit tu) {
if (period == -1) return period;
switch (tu) {
case SECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.SECONDS);
case MINUTES:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.MINUTES);
case HOURS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.HOURS);
case DAYS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.DAYS);
case MILLISECONDS:
return period;
case MICROSECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.MICROSECONDS);
case NANOSECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.NANOSECONDS);
}
return period;
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} field based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
enum Action {
SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH, ASYNCHRONOUS
}
private
@Context
HttpServletRequest servletReq;
private
@Context
UriInfo uriInfo;
private boolean useResumeAnnotation = false;
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
// The annotation-derived action this filter performs.
private final Action action;
// Suspend/broadcast timeout in milliseconds (-1 = no timeout).
private final long timeout;
// For @Schedule: seconds before the first broadcast; for @Asynchronous,
// -1 means "wait for the resource" (see filter()).
private final int waitFor;
private final Suspend.SCOPE scope;
// BroadcastFilter classes to instantiate and attach to the Broadcaster.
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
// Whether to emit streaming padding/comments when suspending.
private final boolean outputComments;
// Cluster-wide filters registered via @Cluster (see addCluster()).
private final ArrayList<ClusterBroadcastFilter> clusters
        = new ArrayList<ClusterBroadcastFilter>();
// Broadcaster topic/header name used by @Subscribe/@Publish/@Asynchronous.
private final String topic;
// Telescoping constructors: each shorter form delegates to the full one
// with these defaults: timeout=-1, waitFor=0, scope=APPLICATION,
// outputComments=true, filters=null, topic=null.
protected Filter(Action action) {
    this(action, -1);
}
protected Filter(Action action, long timeout) {
    this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
    this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
    this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
    this(action, timeout, waitFor, scope, outputComments, null, null);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
    this.action = action;
    this.timeout = timeout;
    this.scope = scope;
    this.outputComments = outputComments;
    this.waitFor = waitFor;
    this.filters = filters;
    this.topic = topic;
}
// No request-side filtering; all work happens on the response path.
public ContainerRequestFilter getRequestFilter() {
    return null;
}
// This instance acts as its own response filter.
public ContainerResponseFilter getResponseFilter() {
    return this;
}
/**
 * Decide whether the connection must be resumed after the first broadcast.
 * JSONP and long-polling transports cannot stay open across broadcasts, so
 * they force resume-on-broadcast regardless of the annotation's value.
 *
 * @param resumeOnBroadcast the value requested by the annotation
 * @return {@code true} when the transport demands it, otherwise the input value
 */
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
    final String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    boolean forcedByTransport = false;
    if (transport != null) {
        forcedByTransport = transport.equals(JSONP_TRANSPORT)
                || transport.equals(LONG_POLLING_TRANSPORT);
    }
    return forcedByTransport || resumeOnBroadcast;
}
/**
 * Decide whether streaming "junk"/padding should be written to the client.
 * WebSocket upgrades and JSONP/long-polling transports never receive
 * padding; otherwise the annotation's value is honored.
 *
 * @param outputJunk the value requested by the annotation
 * @return {@code false} for websocket/jsonp/long-polling, else the input value
 */
boolean outputJunk(boolean outputJunk) {
    boolean webSocketEnabled = false;
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        // Only the first Connection header value is inspected, split on commas.
        for (String token : connection.nextElement().split(",")) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport != null
            && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 *
 * NOTE(review): this method was the subject of a merge conflict; the switch
 * arms were kept as merged and are documented, not restructured.
 *
 * @param request  the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
    // A mapped exception is already being handled by Jersey; do nothing.
    if (response.getMappedThrowable() != null) {
        return response;
    }
    // The resource Atmosphere associated with the current request.
    AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
            (AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
                    .getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
    if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
        useResumeAnnotation = true;
    }
    switch (action) {
        case ASYNCHRONOUS:
            // Force the status code to 200 events independently of the value of the entity (null or not)
            if (response.getStatus() == 204) {
                response.setStatus(200);
            }
            String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
            String broadcasterName = servletReq.getHeader(topic);
            if (transport == null || broadcasterName == null) {
                throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
                        + X_ATMOSPHERE_TRANSPORT
                        + " and uuid " + X_ATMOSPHERE_TRACKING_ID));
            }
            String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
            final boolean waitForResource = waitFor == -1 ? true : false;
            final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
            if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
                boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
                final boolean resumeOnBroadcast = resumeOnBroadcast(false);
                // Attach application-declared listeners to the resource.
                for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                    try {
                        AtmosphereResourceEventListener el = listener.newInstance();
                        InjectorProvider.getInjector().inject(el);
                        if (r instanceof AtmosphereEventLifecycle) {
                            r.addEventListener(el);
                        }
                    } catch (Throwable t) {
                        throw new WebApplicationException(
                                new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
                    }
                }
                final Object entity = response.getEntity();
                // One-shot listener: broadcast the entity once suspended, then detach.
                r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
                    @Override
                    public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
                        try {
                            if (entity != null) {
                                if (waitForResource) {
                                    bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
                                } else {
                                    bcaster.broadcast(entity);
                                }
                            }
                        } finally {
                            event.getResource().removeEventListener(this);
                        }
                    }
                });
                if (resumeOnBroadcast) {
                    servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
                }
                r.setBroadcaster(bcaster);
                executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
            } else {
                // Polling transport or websocket sub-protocol: broadcast immediately.
                Object entity = response.getEntity();
                if (waitForResource) {
                    bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
                } else {
                    bcaster.broadcast(entity);
                }
                if (subProtocol == null) {
                    try {
                        // Unwrap a Callable entity before writing it.
                        if (Callable.class.isAssignableFrom(entity.getClass())) {
                            entity = Callable.class.cast(entity).call();
                        }
                        response.setEntity(entity);
                        response.write();
                    } catch (Throwable t) {
                        logger.debug("Error running Callable", t);
                        response.setEntity(null);
                    }
                } else {
                    response.setEntity(null);
                }
            }
            break;
        case SUSPEND_RESPONSE:
            // The resource method returned a SuspendResponse; honor its settings.
            SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
            boolean outputJunk = outputJunk(s.outputComments());
            boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
            for (AtmosphereResourceEventListener el : s.listeners()) {
                if (r instanceof AtmosphereEventLifecycle) {
                    r.addEventListener(el);
                }
            }
            Broadcaster bc = s.broadcaster();
            if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
                bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
            }
            boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
            // Register our TrackableResource
            boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
            TrackableResource<? extends Trackable> trackableResource = null;
            if (isTracked) {
                trackableResource = preTrack(request, response);
            }
            suspend(resumeOnBroadcast, outputJunk,
                    translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
            // Associate the tracked resource.
            if (isTracked && trackableResource != null) {
                postTrack(trackableResource, r);
            }
            break;
        case SUBSCRIBE_TRACKABLE:
        case SUBSCRIBE:
        case SUSPEND:
        case SUSPEND_TRACKABLE:
        case SUSPEND_RESUME:
            // Shared suspend path for @Suspend/@Subscribe variants.
            outputJunk = outputJunk(outputComments);
            resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
            for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                try {
                    AtmosphereResourceEventListener el = listener.newInstance();
                    InjectorProvider.getInjector().inject(el);
                    if (r instanceof AtmosphereEventLifecycle) {
                        ((AtmosphereEventLifecycle) r).addEventListener(el);
                    }
                } catch (Throwable t) {
                    throw new WebApplicationException(
                            new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
                }
            }
            Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
            // @Subscribe
            if (action == Action.SUBSCRIBE) {
                Class<Broadcaster> c = null;
                try {
                    c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                } catch (Throwable e) {
                    throw new IllegalStateException(e.getMessage());
                }
                broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
            }
            // Tracking is enabled by default
            supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
            // Register our TrackableResource
            isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
            if (isTracked) {
                trackableResource = preTrack(request, response);
            } else {
                trackableResource = null;
            }
            suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
                    broadcaster, r, scope);
            // Associate the tracked resource.
            if (isTracked && trackableResource != null) {
                postTrack(trackableResource, r);
            }
            break;
        case RESUME:
            // Flush any pending entity before resuming the suspended resource.
            if (response.getEntity() != null) {
                try {
                    response.write();
                } catch (IOException ex) {
                    throw new WebApplicationException(ex);
                }
            }
            boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
            if (sessionSupported) {
                r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
            } else {
                // Without sessions, the resume UUID is the last path segment.
                String path = response.getContainerRequest().getPath();
                r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
            }
            if (r != null) {
                resume(r);
            } else {
                throw new WebApplicationException(
                        new IllegalStateException("Unable to retrieve suspended Response. " +
                                "Either session-support is not enabled in atmosphere.xml or the" +
                                "path used to resume is invalid."));
            }
            break;
        case BROADCAST:
        case PUBLISH:
        case RESUME_ON_BROADCAST:
            // Prefer the suspended resource stashed on the request, if any.
            AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
            if (ar != null) {
                r = ar;
            }
            if (action == Action.PUBLISH) {
                Class<Broadcaster> c = null;
                try {
                    c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                } catch (Throwable e) {
                    throw new IllegalStateException(e.getMessage());
                }
                r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
            }
            broadcast(response, r, timeout);
            break;
        case SCHEDULE:
        case SCHEDULE_RESUME:
            Object o = response.getEntity();
            Broadcaster b = r.getBroadcaster();
            // A Broadcastable entity carries its own broadcaster and messages.
            if (response.getEntity() instanceof Broadcastable) {
                b = ((Broadcastable) response.getEntity()).getBroadcaster();
                o = ((Broadcastable) response.getEntity()).getMessage();
                response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
            }
            if (response.getEntity() != null) {
                try {
                    response.write();
                } catch (IOException ex) {
                    throw new WebApplicationException(ex);
                }
            }
            if (action == Action.SCHEDULE_RESUME) {
                configureResumeOnBroadcast(b);
            }
            // waitFor seconds before the first broadcast, then every timeout seconds.
            b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
            break;
    }
    return response;
}
/**
 * Prepare tracking for the response entity: reuse the entity when it is a
 * TrackableResource (unwrapping the real entity for the client), otherwise
 * synthesize one keyed by the client-supplied tracking id. Registers the
 * trackable with the TrackableSession and echoes the tracking id back in
 * the response header and as a request attribute.
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
    // Class.cast returns null for a null entity, handled below.
    TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
    if (trackableResource == null) {
        trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
    } else {
        // Unwrap so the client receives the real entity, not the wrapper.
        response.setEntity(trackableResource.entity());
    }
    // Id preference order: client header, id already on the trackable, fresh UUID.
    String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
    if (trackableUUID == null && trackableResource.trackingID() != null) {
        trackableUUID = trackableResource.trackingID();
    } else if (trackableUUID == null) {
        trackableUUID = UUID.randomUUID().toString();
    }
    trackableResource.setTrackingID(trackableUUID);
    TrackableSession.getDefault().track(trackableResource);
    response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
    servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
    return trackableResource;
}
/**
 * Associate the tracked entity with the suspended resource: the resource
 * itself when the trackable's declared type is an AtmosphereResource,
 * otherwise the resource's Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // Redundant "? true : false" removed; the condition is already a boolean.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Add Atmosphere-related response headers to the builder: a websocket error
 * header when an upgrade was requested but is unsupported, optional
 * no-cache headers, and optional CORS allow-origin headers. Both optional
 * groups are driven by request attributes set by the framework.
 *
 * @param b the builder to augment
 * @return the (possibly replaced) builder
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        // Only the first Connection header value is inspected, split on commas.
        String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String upgrade : e) {
            // NOTE(review): unlike outputJunk(), the token is not trim()ed here
            // before comparison — confirm whether that is intentional.
            if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                if (!webSocketSupported) {
                    b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
                }
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Mark every resource currently attached to the Broadcaster so that it is
 * resumed as soon as the next broadcast completes.
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest req = (HttpServletRequest) resource.getRequest();
        req.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Attach cluster and application BroadcastFilters to the Broadcaster's
 * configuration. No-op when filters are already present, because a
 * BroadcasterConfig may be shared across Broadcasters.
 *
 * @param bc the target Broadcaster
 * @throws WebApplicationException when {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Bug fix: register the filter only when instantiation and
                // injection succeeded. Previously addFilter(f) ran outside the
                // try, so a failure added null (or the previous iteration's
                // filter instance) to the config.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Record the AtmosphereResourceEventListener classes declared on the annotation;
// they are instantiated and attached in filter().
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
    this.listeners = listeners;
}
/**
 * Broadcast the response entity on the resource's (or the Broadcastable's
 * own) Broadcaster. Delay semantics: -1 broadcasts immediately and waits
 * for completion, 0 defers via delayBroadcast, any other value schedules
 * the broadcast after {@code delay} seconds.
 *
 * @param r     the container response whose entity is broadcast
 * @param ar    the resource supplying the default Broadcaster
 * @param delay see above
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // Something went wrong if null.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null) return;
            if (delay == -1) {
                Future<Object> f = b.broadcast(msg);
                if (f == null) return;
                // Block until the broadcast completes; the unused local that
                // previously captured the result has been removed.
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Preserve the interrupt status for callers further up the stack.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Attach the configured BroadcastFilters to the Broadcaster (idempotent).
void addFilter(Broadcaster bc) {
    configureFilter(bc);
}
// Resume a previously suspended resource.
void resume(AtmosphereResource resource) {
    resource.resume();
}
// Register a cluster-wide filter declared via @Cluster.
void addCluster(ClusterBroadcastFilter f) {
    clusters.add(f);
}
/**
 * Suspend the connection after wiring the resource to the proper
 * Broadcaster. Handles the optional resume Location header, the
 * session-cached suspended resource, Broadcastable entities, and
 * REQUEST-scoped broadcasters, then delegates to executeSuspend().
 *
 * @param resumeOnBroadcast resume as soon as the first broadcast occurs
 * @param comments          emit streaming padding/comments
 * @param timeout           suspend timeout in milliseconds (-1 = none)
 * @param bc                explicit Broadcaster, or null to derive one
 * @param localScope        the @Suspend scope for this request
 */
void suspend(boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    URI location = null;
    // Do not add location header if already there.
    if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    // A resource suspended earlier in this session must be detached from its
    // Broadcaster before the new suspension takes over.
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        // NOTE(review): the original code contained an unreachable else branch
        // here (the enclosing condition already guarantees bc == null), removed
        // as dead code. That branch suggests the condition may once have been
        // scope-only — confirm against upstream history; behavior is unchanged.
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    r.setBroadcaster(bc);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated new Boolean(true); readers
        // compare by value, so the cached instance is equivalent.
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Perform the actual suspension: cache the resource (session and request),
 * negotiate the response content type, write optional streaming padding and
 * the entity, then suspend the underlying connection.
 *
 * NOTE(review): the order of header/padding/entity writes below is
 * significant; do not reorder.
 *
 * @param flushEntity when true, the (possibly Callable) entity is resolved
 *                    and written before suspending
 */
void executeSuspend(AtmosphereResource r,
                    long timeout,
                    boolean comments,
                    boolean resumeOnBroadcast,
                    URI location,
                    ContainerRequest request,
                    ContainerResponse response,
                    boolean flushEntity) {
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    configureFilter(r.getBroadcaster());
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will retrun the first
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            // Fall back to octet-stream when negotiation yields a wildcard.
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            b = b.header("Content-Type", contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1");
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
        }
        // Detect Jetty releases whose flush behavior breaks padding; see bug link.
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        // Streaming padding is written first, carrying the Location header if set.
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                // Nulled so the entity write below does not repeat the header.
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null && flushEntity) {
            try {
                // Unwrap a Callable entity before writing it.
                if (Callable.class.isAssignableFrom(entity.getClass())) {
                    entity = Callable.class.cast(entity).call();
                }
            } catch (Throwable t) {
                logger.error("Error executing callable {}", entity);
                entity = null;
            }
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
 * Create a {@link ResourceFilter} which contains the information about the
 * annotation being processed.
 * <p/>
 * XXX Need to filter invalid mix of annotation.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke, or null when no
 *         Atmosphere annotation applies (normal Jersey method).
 */
public List<ResourceFilter> create(AbstractMethod am) {
    LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
    Filter f;
    if (logger.isDebugEnabled()) {
        for (Annotation annotation : am.getAnnotations()) {
            logger.debug("AtmosphereFilter processing annotation: {}", annotation);
        }
    }
    if (am.getMethod() == null) {
        return null;
    }
    // A SuspendResponse return type short-circuits all other annotations.
    if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
        list.addLast(new Filter(Action.SUSPEND_RESPONSE));
        return list;
    }
    if (am.isAnnotationPresent(Broadcast.class)) {
        int delay = am.getAnnotation(Broadcast.class).delay();
        Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
        if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
            f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
        } else {
            f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
        }
        list.addLast(f);
        // @Cluster filters ride along with the @Broadcast filter.
        if (am.isAnnotationPresent(Cluster.class)) {
            broadcastFilter = am.getAnnotation(Cluster.class).value();
            for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
                try {
                    ClusterBroadcastFilter cbf = c.newInstance();
                    InjectorProvider.getInjector().inject(cbf);
                    cbf.setUri(am.getAnnotation(Cluster.class).name());
                    f.addCluster(cbf);
                } catch (Throwable t) {
                    logger.warn("Invalid ClusterBroadcastFilter", t);
                }
            }
        }
    }
    if (am.isAnnotationPresent(Asynchronous.class)) {
        int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
        Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
        boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
        // waitFor == -1 encodes "wait for the resource"; see filter().
        f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
        f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Suspend.class)) {
        long suspendTimeout = am.getAnnotation(Suspend.class).period();
        TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
        suspendTimeout = translateTimeUnit(suspendTimeout, tu);
        Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
        boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
        boolean trackable = false;
        if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
            trackable = true;
        }
        if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
        } else {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
        }
        f.setListeners(am.getAnnotation(Suspend.class).listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Subscribe.class)) {
        boolean trackable = false;
        if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
            trackable = true;
        }
        f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
                false, null, am.getAnnotation(Subscribe.class).value());
        f.setListeners(am.getAnnotation(Subscribe.class).listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Publish.class)) {
        f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
                false, null, am.getAnnotation(Publish.class).value());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Resume.class)) {
        useResumeAnnotation = true;
        int suspendTimeout = am.getAnnotation(Resume.class).value();
        list.addFirst(new Filter(Action.RESUME, suspendTimeout));
    }
    if (am.isAnnotationPresent(Schedule.class)) {
        int period = am.getAnnotation(Schedule.class).period();
        int waitFor = am.getAnnotation(Schedule.class).waitFor();
        if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
            list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
        } else {
            list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
        }
    }
    // Nothing, normal Jersey application.
    return list.size() > 0 ? list : null;
}
/**
 * Convert {@code period} expressed in {@code tu} into milliseconds.
 * A period of -1 (meaning "no timeout") is passed through unchanged.
 *
 * @param period the duration, or -1 for no timeout
 * @param tu     the unit {@code period} is expressed in
 * @return the duration in milliseconds, or -1
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    // TimeUnit.toMillis handles every unit, replacing the previous
    // per-constant switch with identical results (including the
    // MILLISECONDS identity case and saturation on overflow).
    return tu.toMillis(period);
}
}
Right
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} filed based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
    private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
    // Request/session attribute keys used to pass state between this filter and the framework.
    public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
    public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
    public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
    // NOTE(review): the two keys below lack the '.' separator used by the keys above.
    // The value is a runtime attribute name read elsewhere, so it is only flagged here,
    // not changed — confirm all readers before normalizing.
    public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
    public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
    // The annotation-derived behavior a Filter instance applies in its response filter.
    enum Action {
        SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
        SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
        SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH
    }
    // Injected by Jersey; scoped per request via @Context proxying.
    private
    @Context
    HttpServletRequest servletReq;
    private
    @Context
    UriInfo uriInfo;
    // Suspended resources awaiting resume, keyed by generated UUID, used when
    // session support is disabled (see suspend()/filter() RESUME case).
    private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
            new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
    /**
     * Per-resource-method filter carrying the configuration extracted from one
     * Atmosphere annotation; applies its {@link Action} in the response phase.
     */
    private class Filter implements ResourceFilter, ContainerResponseFilter {
        private final Action action;   // behavior applied by filter()
        private final long timeout;    // suspend/schedule period (milliseconds; -1 = unbounded)
        private final int waitFor;     // delay before a scheduled broadcast starts
        private final Suspend.SCOPE scope;
        private final Class<BroadcastFilter>[] filters;  // BroadcastFilters to install, may be null
        private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
        private final boolean outputComments;  // emit streaming padding when suspending
        private final ArrayList<ClusterBroadcastFilter> clusters
                = new ArrayList<ClusterBroadcastFilter>();
        private final String topic;    // Broadcaster lookup id for @Subscribe/@Publish
        protected Filter(Action action) {
            this(action, -1);
        }
        protected Filter(Action action, long timeout) {
            this(action, timeout, 0);
        }
        protected Filter(Action action, long timeout, int waitFor) {
            this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
        }
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
            this(action, timeout, waitFor, scope, true);
        }
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
            this(action, timeout, waitFor, scope, outputComments, null, null);
        }
        // Canonical constructor; all other constructors delegate here with defaults.
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
            this.action = action;
            this.timeout = timeout;
            this.scope = scope;
            this.outputComments = outputComments;
            this.waitFor = waitFor;
            this.filters = filters;
            this.topic = topic;
        }
        // Only the response side is filtered; no request-phase work is needed.
        public ContainerRequestFilter getRequestFilter() {
            return null;
        }
        public ContainerResponseFilter getResponseFilter() {
            return this;
        }
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
boolean webSocketEnabled = false;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
webSocketEnabled = true;
break;
}
}
}
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
if (webSocketEnabled) {
return false;
} else if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return false;
}
return outputJunk;
}
/**
* Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
* based on the annotation the web application has used.
*
* @param request the {@link ContainerRequest}
* @param response the {@link ContainerResponse}
* @return the {@link ContainerResponse}
*/
        public ContainerResponse filter(ContainerRequest request, ContainerResponse response) {
            // An already-mapped exception passes through untouched.
            if (response.getMappedThrowable() != null) {
                return response;
            }
            AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
                    (AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
                            .getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
            boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
            switch (action) {
                // Resource method returned a SuspendResponse: suspend using its settings.
                case SUSPEND_RESPONSE:
                    SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
                    boolean outputJunk = outputJunk(request, s.outputComments());
                    boolean resumeOnBroadcast = resumeOnBroadcast(request, s.resumeOnBroadcast());
                    for (AtmosphereResourceEventListener el : s.listeners()) {
                        if (r instanceof AtmosphereEventLifecycle) {
                            ((AtmosphereEventLifecycle) r).addEventListener(el);
                        }
                    }
                    Broadcaster bc = s.broadcaster();
                    if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
                        bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
                    }
                    boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
                    // Register our TrackableResource
                    boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
                    TrackableResource<? extends Trackable> trackableResource = null;
                    if (isTracked) {
                        trackableResource = preTrack(request, response);
                    }
                    suspend(sessionSupported, resumeOnBroadcast, outputJunk,
                            translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
                    // Associate the tracked resource.
                    if (isTracked && trackableResource != null) {
                        postTrack(trackableResource, r);
                    }
                    break;
                // Annotation-driven suspends (@Suspend / @Subscribe), tracked or not.
                case SUBSCRIBE_TRACKABLE:
                case SUBSCRIBE:
                case SUSPEND:
                case SUSPEND_TRACKABLE:
                case SUSPEND_RESUME:
                    outputJunk = outputJunk(request, outputComments);
                    resumeOnBroadcast = resumeOnBroadcast(request, (action == Action.SUSPEND_RESUME));
                    for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                        try {
                            AtmosphereResourceEventListener el = listener.newInstance();
                            InjectorProvider.getInjector().inject(el);
                            if (r instanceof AtmosphereEventLifecycle) {
                                ((AtmosphereEventLifecycle) r).addEventListener(el);
                            }
                        } catch (Throwable t) {
                            throw new WebApplicationException(
                                    new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
                        }
                    }
                    Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
                    // @Subscribe
                    if (action == Action.SUBSCRIBE) {
                        Class<Broadcaster> c = null;
                        try {
                            c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                        } catch (Throwable e) {
                            throw new IllegalStateException(e.getMessage());
                        }
                        broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
                    }
                    // Tracking is enabled by default
                    supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
                    // Register our TrackableResource
                    isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
                    if (isTracked) {
                        trackableResource = preTrack(request, response);
                    } else {
                        trackableResource = null;
                    }
                    suspend(sessionSupported, resumeOnBroadcast, outputJunk, timeout, request, response,
                            broadcaster, r, scope);
                    // Associate the tracked resource.
                    if (isTracked && trackableResource != null) {
                        postTrack(trackableResource, r);
                    }
                    break;
                // @Resume: flush the entity, then locate and resume the suspended peer.
                case RESUME:
                    if (response.getEntity() != null) {
                        try {
                            response.write();
                        } catch (IOException ex) {
                            throw new WebApplicationException(ex);
                        }
                    }
                    if (sessionSupported) {
                        r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
                    } else {
                        // Without sessions, the last path segment is the resume UUID.
                        String path = response.getContainerRequest().getPath();
                        r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
                    }
                    if (r != null) {
                        resume(r);
                    } else {
                        // NOTE(review): string concatenation below produces "...or thepath used..."
                        // (missing space between "the" and "path"); left unchanged here since the
                        // message is runtime behavior.
                        throw new WebApplicationException(
                                new IllegalStateException("Unable to retrieve suspended Response. " +
                                        "Either session-support is not enabled in atmosphere.xml or the" +
                                        "path used to resume is invalid."));
                    }
                    break;
                // @Broadcast / @Publish: push the entity through the Broadcaster.
                case BROADCAST:
                case PUBLISH:
                case RESUME_ON_BROADCAST:
                    AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
                    if (ar != null) {
                        r = ar;
                    }
                    if (action == Action.PUBLISH) {
                        Class<Broadcaster> c = null;
                        try {
                            c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                        } catch (Throwable e) {
                            throw new IllegalStateException(e.getMessage());
                        }
                        r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
                    }
                    broadcast(response, r, timeout);
                    break;
                // @Schedule: periodic broadcast, optionally resuming on each broadcast.
                case SCHEDULE:
                case SCHEDULE_RESUME:
                    Object o = response.getEntity();
                    Broadcaster b = r.getBroadcaster();
                    if (response.getEntity() instanceof Broadcastable) {
                        b = ((Broadcastable) response.getEntity()).getBroadcaster();
                        o = ((Broadcastable) response.getEntity()).getMessage();
                        response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
                    }
                    if (response.getEntity() != null) {
                        try {
                            response.write();
                        } catch (IOException ex) {
                            throw new WebApplicationException(ex);
                        }
                    }
                    if (action == Action.SCHEDULE_RESUME) {
                        configureResumeOnBroadcast(b);
                    }
                    // timeout doubles as the fixed broadcast period here (seconds).
                    b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
                    break;
            }
            return response;
        }
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
response.setEntity(trackableResource.entity());
}
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type()) ? true : false;
trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
if (!webSocketSupported) {
b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
}
}
}
}
boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
if (injectCacheHeaders) {
// Set to expire far in the past.
b = b.header(EXPIRES, "-1");
// Set standard HTTP/1.1 no-cache headers.
b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
// Set standard HTTP/1.0 no-cache header.
b = b.header(PRAGMA, "no-cache");
}
if (enableAccessControl) {
b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
return b;
}
void configureResumeOnBroadcast(Broadcaster b) {
Iterator<AtmosphereResource<?, ?>> i = b.getAtmosphereResources().iterator();
while (i.hasNext()) {
HttpServletRequest r = (HttpServletRequest) i.next().getRequest();
r.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
}
}
void configureFilter(Broadcaster bc) {
if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
/**
* Here we can't predict if it's the same set of filter shared across all Broadcaster as
* Broadcaster can have their own BroadcasterConfig instance.
*/
BroadcasterConfig c = bc.getBroadcasterConfig();
// Already configured
if (c.hasFilters()) {
return;
}
// Always the first one, before any transformation/filtering
for (ClusterBroadcastFilter cbf : clusters) {
cbf.setBroadcaster(bc);
c.addFilter(cbf);
}
BroadcastFilter f = null;
if (filters != null) {
for (Class<BroadcastFilter> filter : filters) {
try {
f = filter.newInstance();
InjectorProvider.getInjector().inject(f);
} catch (Throwable t) {
logger.warn("Invalid @BroadcastFilter: " + filter, t);
}
c.addFilter(f);
}
}
}
        // Records the AtmosphereResourceEventListener classes that filter() will
        // instantiate, inject and attach when suspending the connection.
        private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
            this.listeners = listeners;
        }
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
Object o = r.getEntity();
Broadcaster b = ar.getBroadcaster();
Object msg = o;
String returnMsg = null;
// Something went wrong if null.
if (o instanceof Broadcastable) {
if (((Broadcastable) o).getBroadcaster() != null) {
b = ((Broadcastable) o).getBroadcaster();
}
msg = ((Broadcastable) o).getMessage();
returnMsg = ((Broadcastable) o).getResponseMessage().toString();
}
if (action == Action.RESUME_ON_BROADCAST) {
configureResumeOnBroadcast(b);
}
if (o != null) {
addFilter(b);
try {
r.setEntity(msg);
if (msg == null) return;
if (delay == -1) {
Future<Object> f = b.broadcast(msg);
if (f == null) return;
Object t = f.get();
if (o instanceof Broadcastable) {
r.setEntity(returnMsg);
}
} else if (delay == 0) {
b.delayBroadcast(msg);
} else {
b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
}
} catch (InterruptedException ex) {
logger.error("broadcast interrupted", ex);
} catch (ExecutionException ex) {
logger.error("execution exception during broadcast", ex);
}
}
}
        // Ensures the given Broadcaster has this Filter's BroadcastFilters installed.
        void addFilter(Broadcaster bc) {
            configureFilter(bc);
        }
        // Resumes a previously suspended connection.
        void resume(AtmosphereResource resource) {
            resource.resume();
        }
        // Registers a ClusterBroadcastFilter; these are installed ahead of all other
        // filters by configureFilter().
        void addCluster(ClusterBroadcastFilter f) {
            clusters.add(f);
        }
        /**
         * Suspends the current connection: resolves the Broadcaster to use, records the
         * resource for later resume (session attribute or UUID map), writes padding and
         * the entity if required, then suspends the underlying AtmosphereResource.
         *
         * @param sessionSupported  whether HTTP-session tracking of the suspended resource is on
         * @param resumeOnBroadcast resume on the first broadcast instead of streaming
         * @param comments          write streaming padding before suspending
         * @param timeout           suspend timeout in milliseconds (-1 = unbounded)
         * @param bc                Broadcaster to attach, may be null (resolved below)
         * @param localScope        Suspend scope; REQUEST gets a per-request Broadcaster
         */
        void suspend(boolean sessionSupported,
                     boolean resumeOnBroadcast,
                     boolean comments,
                     long timeout,
                     ContainerRequest request,
                     ContainerResponse response,
                     Broadcaster bc,
                     AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
                     Suspend.SCOPE localScope) {
            // Force the status code to 200 events independently of the value of the entity (null or not)
            if (response.getStatus() == 204) {
                response.setStatus(200);
            }
            BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
                    .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
            URI location = null;
            // Do not add location header if already there.
            if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
                // No session: publish a resume URI containing a fresh UUID and remember it.
                String uuid = UUID.randomUUID().toString();
                location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
                resumeCandidates.put(uuid, r);
                servletReq.setAttribute(RESUME_UUID, uuid);
                servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
            }
            if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
                bc = r.getBroadcaster();
            }
            // Reuse the Broadcaster of a previously suspended resource in this session,
            // detaching the stale resource first.
            if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
                AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                        (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
                bc = cached.getBroadcaster();
                // Just in case something went wrong.
                try {
                    bc.removeAtmosphereResource(cached);
                } catch (IllegalStateException ex) {
                    logger.trace(ex.getMessage(), ex);
                }
            }
            // A Broadcastable entity supplies both the Broadcaster and the response payload.
            if (response.getEntity() instanceof Broadcastable) {
                Broadcastable b = (Broadcastable) response.getEntity();
                bc = b.getBroadcaster();
                response.setEntity(b.getResponseMessage());
            }
            if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
                // NOTE(review): the outer condition already guarantees bc == null here, so
                // the inner else branch below is unreachable dead code — candidate for cleanup.
                if (bc == null) {
                    try {
                        String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
                        if (id == null){
                            id = UUID.randomUUID().toString();
                        }
                        bc = broadcasterFactory.get(id);
                        bc.setScope(Broadcaster.SCOPE.REQUEST);
                    } catch (Exception ex) {
                        logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
                    }
                } else {
                    bc.setScope(Broadcaster.SCOPE.REQUEST);
                }
            }
            configureFilter(bc);
            r.setBroadcaster(bc);
            if (sessionSupported) {
                servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
                servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
            }
            servletReq.setAttribute(SUSPENDED_RESOURCE, r);
            servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
            logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
            if (resumeOnBroadcast) {
                // NOTE(review): new Boolean(true) allocates needlessly — Boolean.TRUE would do.
                servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
            }
            // Set the content-type based on the returned entity.
            try {
                MediaType contentType = response.getMediaType();
                if (contentType == null && response.getEntity() != null) {
                    LinkedList<MediaType> l = new LinkedList<MediaType>();
                    // Will return the first acceptable media type.
                    l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
                    contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                            response.getEntity().getClass(),
                            response.getEntityType(),
                            response.getAnnotations(),
                            l);
                    if (contentType == null ||
                            contentType.isWildcardType() || contentType.isWildcardSubtype())
                        contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
                }
                Object entity = response.getEntity();
                Response.ResponseBuilder b = Response.ok();
                b = configureHeaders(b);
                if (entity != null) {
                    b = b.header("Content-Type", contentType != null ?
                            contentType.toString() : "text/html; charset=ISO-8859-1");
                    servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
                }
                // Detect Jetty versions whose chunked-write handling breaks padding.
                boolean eclipse362468 = false;
                String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
                if (serverInfo.indexOf("jetty") != -1) {
                    String[] jettyVersion = serverInfo.substring(6).split("\\.");
                    // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
                    eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                            || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
                    if (comments && eclipse362468) {
                        logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
                    }
                }
                // Streaming transports get padding so proxies/browsers start rendering.
                if (!eclipse362468 && comments && !resumeOnBroadcast) {
                    String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
                    String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
                    if (location != null) {
                        b = b.header(HttpHeaders.LOCATION, location);
                        location = null;
                    }
                    response.setResponse(b.entity(paddingData).build());
                    response.write();
                }
                if (entity != null) {
                    if (location != null) {
                        b = b.header(HttpHeaders.LOCATION, location);
                    }
                    response.setResponse(b.entity(entity).build());
                    response.write();
                }
                response.setEntity(null);
                r.suspend(timeout, false);
            } catch (IOException ex) {
                throw new WebApplicationException(ex);
            }
        }
}
/**
 * Create a {@link ResourceFilter} which contains the information about the
 * annotation being processed.
 * <p/>
 * XXX Need to filter out invalid mixes of annotations.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke.
 */
    public List<ResourceFilter> create(AbstractMethod am) {
        LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
        Filter f;
        if (logger.isDebugEnabled()) {
            for (Annotation annotation : am.getAnnotations()) {
                logger.debug("AtmosphereFilter processing annotation: {}", annotation);
            }
        }
        if (am.getMethod() == null) {
            return null;
        }
        // A SuspendResponse return type overrides every annotation.
        if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
            list.addLast(new Filter(Action.SUSPEND_RESPONSE));
            return list;
        }
        if (am.isAnnotationPresent(Broadcast.class)) {
            int delay = am.getAnnotation(Broadcast.class).delay();
            // NOTE(review): despite the name, this holds BroadcastFilter classes (the
            // annotation's value()), not a timeout; it is later reused for @Cluster values.
            Class[] suspendTimeout = am.getAnnotation(Broadcast.class).value();
            if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
                f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
            } else {
                f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
            }
            list.addLast(f);
            if (am.isAnnotationPresent(Cluster.class)) {
                suspendTimeout = am.getAnnotation(Cluster.class).value();
                for (Class<ClusterBroadcastFilter> c : suspendTimeout) {
                    try {
                        ClusterBroadcastFilter cbf = c.newInstance();
                        InjectorProvider.getInjector().inject(cbf);
                        cbf.setUri(am.getAnnotation(Cluster.class).name());
                        f.addCluster(cbf);
                    } catch (Throwable t) {
                        logger.warn("Invalid ClusterBroadcastFilter", t);
                    }
                }
            }
        }
        if (am.isAnnotationPresent(Suspend.class)) {
            long suspendTimeout = am.getAnnotation(Suspend.class).period();
            TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
            suspendTimeout = translateTimeUnit(suspendTimeout, tu);
            Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
            boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
            boolean trackable = false;
            if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
                trackable = true;
            }
            if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
                f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
            } else {
                f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
            }
            f.setListeners(am.getAnnotation(Suspend.class).listeners());
            // Suspend filters must run before broadcast filters: addFirst.
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Subscribe.class)) {
            boolean trackable = false;
            if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
                trackable = true;
            }
            f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
                    false, null, am.getAnnotation(Subscribe.class).value());
            f.setListeners(am.getAnnotation(Subscribe.class).listeners());
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Publish.class)) {
            f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
                    false, null, am.getAnnotation(Publish.class).value());
            list.addFirst(f);
        }
        if (am.isAnnotationPresent(Resume.class)) {
            int suspendTimeout = am.getAnnotation(Resume.class).value();
            list.addFirst(new Filter(Action.RESUME, suspendTimeout));
        }
        if (am.isAnnotationPresent(Schedule.class)) {
            int period = am.getAnnotation(Schedule.class).period();
            int waitFor = am.getAnnotation(Schedule.class).waitFor();
            if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
                list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
            } else {
                list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
            }
        }
        // Nothing, normal Jersey application.
        return list.size() > 0 ? list : null;
    }
private long translateTimeUnit(long period, TimeUnit tu) {
if (period == -1) return period;
switch (tu) {
case SECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.SECONDS);
case MINUTES:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.MINUTES);
case HOURS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.HOURS);
case DAYS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.DAYS);
case MILLISECONDS:
return period;
case MICROSECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.MICROSECONDS);
case NANOSECONDS:
return TimeUnit.MILLISECONDS.convert(period, TimeUnit.NANOSECONDS);
}
return period;
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
 * {@link ResourceFilterFactory} which intercepts the response and appropriately
 * sets the {@link AtmosphereResourceEvent} field based on the annotations the
 * application has defined.
 *
 * @author Jeanfrancois Arcand
 */
public class AtmosphereFilter implements ResourceFilterFactory {
    private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
    // Request/session attribute keys used to pass state between this filter and the framework.
    public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
    public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
    public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
    // NOTE(review): the two keys below lack the '.' separator used by the keys above.
    // The value is a runtime attribute name read elsewhere, so it is only flagged here,
    // not changed — confirm all readers before normalizing.
    public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
    public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
    // The annotation-derived behavior a Filter instance applies in its response filter.
    enum Action {
        SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
        SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
        SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH
    }
    // Injected by Jersey; scoped per request via @Context proxying.
    private
    @Context
    HttpServletRequest servletReq;
    private
    @Context
    UriInfo uriInfo;
    // Suspended resources awaiting resume, keyed by generated UUID, used when
    // session support is disabled.
    private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
            new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
    /**
     * Per-resource-method filter carrying the configuration extracted from one
     * Atmosphere annotation; applies its {@link Action} in the response phase.
     */
    private class Filter implements ResourceFilter, ContainerResponseFilter {
        private final Action action;   // behavior applied by filter()
        private final long timeout;    // suspend/schedule period (milliseconds; -1 = unbounded)
        private final int waitFor;     // delay before a scheduled broadcast starts
        private final Suspend.SCOPE scope;
        private final Class<BroadcastFilter>[] filters;  // BroadcastFilters to install, may be null
        private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
        private final boolean outputComments;  // emit streaming padding when suspending
        private final ArrayList<ClusterBroadcastFilter> clusters
                = new ArrayList<ClusterBroadcastFilter>();
        private final String topic;    // Broadcaster lookup id for @Subscribe/@Publish
        protected Filter(Action action) {
            this(action, -1);
        }
        protected Filter(Action action, long timeout) {
            this(action, timeout, 0);
        }
        protected Filter(Action action, long timeout, int waitFor) {
            this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
        }
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
            this(action, timeout, waitFor, scope, true);
        }
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
            this(action, timeout, waitFor, scope, outputComments, null, null);
        }
        // Canonical constructor; all other constructors delegate here with defaults.
        protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
            this.action = action;
            this.timeout = timeout;
            this.scope = scope;
            this.outputComments = outputComments;
            this.waitFor = waitFor;
            this.filters = filters;
            this.topic = topic;
        }
        // Only the response side is filtered; no request-phase work is needed.
        public ContainerRequestFilter getRequestFilter() {
            return null;
        }
        public ContainerResponseFilter getResponseFilter() {
            return this;
        }
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
boolean webSocketEnabled = false;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
webSocketEnabled = true;
break;
}
}
}
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
if (webSocketEnabled) {
return false;
} else if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return false;
}
return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 *
 * @param request the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(ContainerRequest request, ContainerResponse response) {
// A request that already failed with a mapped exception is passed through untouched.
if (response.getMappedThrowable() != null) {
return response;
}
// The AtmosphereResource was attached to the request by the Atmosphere framework.
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
switch (action) {
case SUSPEND_RESPONSE:
// The resource method returned a SuspendResponse: unwrap it and honor its own settings.
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(request, s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(request, s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
// Fall back to the injected Broadcaster unless the suspend is REQUEST-scoped.
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(sessionSupported, resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
// Annotation-driven suspend: the configuration comes from this Filter's fields.
outputJunk = outputJunk(request, outputComments);
resumeOnBroadcast = resumeOnBroadcast(request, (action == Action.SUSPEND_RESUME));
// Instantiate and inject the declared per-request event listeners.
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(sessionSupported, resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case RESUME:
// Flush any entity first so the client sees it before the connection is resumed.
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
// Locate the suspended resource via the session, or via the resume-path token
// previously stored in resumeCandidates.
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
// @Publish targets a named Broadcaster looked up by topic.
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
// A Broadcastable entity carries its own Broadcaster, payload and response message.
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
// NOTE(review): waitFor/timeout are interpreted as SECONDS here, while the
// @Suspend path converts to milliseconds via translateTimeUnit — confirm the
// @Schedule annotation values are indeed seconds.
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
// Prepare a TrackableResource for this request/response pair: reuse the entity
// when it already is a TrackableResource, otherwise create a fresh one, then
// choose and propagate the tracking ID.
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
// Class.cast(null) yields null, so a null entity falls into the first branch below.
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Unwrap: the real entity to write is the one the TrackableResource carries.
response.setEntity(trackableResource.entity());
}
// Tracking ID priority: request header, then the resource's own ID, then a new UUID.
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
// Echo the tracking ID back to the client and stash it for downstream code.
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * Attach the live object to the TrackableResource once suspension succeeded:
 * the resource itself when the tracked type is an AtmosphereResource,
 * otherwise its Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // isAssignableFrom already yields a boolean; the original "? true : false" was redundant.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
// Apply transport-related response headers: a websocket error header when the
// client requested an upgrade the container cannot honor, optional no-cache
// headers, and optional CORS headers. Flags are read from request attributes.
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
// NOTE(review): unlike outputJunk, tokens are not trim()ed here, so
// "keep-alive, Upgrade" (with a space) would not match — confirm intended.
for (String upgrade : e) {
if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
if (!webSocketSupported) {
b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
}
}
}
}
boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
if (injectCacheHeaders) {
// Set to expire far in the past.
b = b.header(EXPIRES, "-1");
// Set standard HTTP/1.1 no-cache headers.
b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
// Set standard HTTP/1.0 no-cache header.
b = b.header(PRAGMA, "no-cache");
}
if (enableAccessControl) {
b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
return b;
}
/**
 * Flag every request currently attached to the Broadcaster so that the next
 * broadcast resumes (completes) its suspended connection.
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest suspendedRequest = (HttpServletRequest) resource.getRequest();
        suspendedRequest.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Install the cluster filters and the annotation-declared BroadcastFilters on
 * the Broadcaster's configuration, unless it already has filters.
 *
 * @param bc the Broadcaster to configure; must not be null
 * @throws WebApplicationException when {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // BUGFIX: addFilter moved inside the try block. Previously it ran
                // even when instantiation/injection threw, registering null (first
                // iteration) or the previous filter again (later iterations).
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Store the listener classes declared on the annotation; they are instantiated
// per request inside filter().
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
// Broadcast the response entity through the resource's Broadcaster.
// delay (seconds): -1 = broadcast immediately and wait for completion,
// 0 = delay until the next broadcast, >0 = fixed delay.
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
Object o = r.getEntity();
Broadcaster b = ar.getBroadcaster();
Object msg = o;
String returnMsg = null;
// Something went wrong if null.
if (o instanceof Broadcastable) {
// A Broadcastable may override the Broadcaster and splits the payload
// (broadcast message) from the value returned to this client.
if (((Broadcastable) o).getBroadcaster() != null) {
b = ((Broadcastable) o).getBroadcaster();
}
msg = ((Broadcastable) o).getMessage();
returnMsg = ((Broadcastable) o).getResponseMessage().toString();
}
if (action == Action.RESUME_ON_BROADCAST) {
configureResumeOnBroadcast(b);
}
if (o != null) {
addFilter(b);
try {
r.setEntity(msg);
if (msg == null) return;
if (delay == -1) {
// Synchronous broadcast: wait for completion so the Broadcastable's
// response message can replace the entity afterwards.
Future<Object> f = b.broadcast(msg);
if (f == null) return;
Object t = f.get();
if (o instanceof Broadcastable) {
r.setEntity(returnMsg);
}
} else if (delay == 0) {
b.delayBroadcast(msg);
} else {
b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
}
} catch (InterruptedException ex) {
// NOTE(review): the interrupt flag is not restored here — consider
// Thread.currentThread().interrupt().
logger.error("broadcast interrupted", ex);
} catch (ExecutionException ex) {
logger.error("execution exception during broadcast", ex);
}
}
}
// Thin alias for configureFilter, kept for readability at call sites.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resume (complete) a previously suspended connection.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Register a cluster-wide broadcast filter; installed by configureFilter.
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Suspend the response: select/configure the {@link Broadcaster}, register the
 * resource for a later resume, emit optional streaming padding plus the entity,
 * and finally suspend the underlying connection.
 *
 * Fixes vs. the previous version: the unreachable else-branch of a redundant
 * nested {@code bc == null} check was removed, and a latent NPE on a null
 * content type is guarded (see below).
 *
 * @param sessionSupported  whether the suspended resource is cached in the HTTP session
 * @param resumeOnBroadcast whether the connection resumes after the first broadcast
 * @param comments          whether to write streaming padding before suspending
 * @param timeout           suspend timeout in milliseconds (-1 is passed through untranslated)
 * @param request           the current {@link ContainerRequest}
 * @param response          the current {@link ContainerResponse}
 * @param bc                the {@link Broadcaster} to use, or null to derive one
 * @param r                 the {@link AtmosphereResource} backing the connection
 * @param localScope        the {@link Suspend.SCOPE} requested by the annotation
 */
void suspend(boolean sessionSupported,
             boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    URI location = null;
    // Do not add location header if already there.
    if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    // Reuse the Broadcaster of a previously suspended, session-cached resource,
    // detaching the stale resource from it first.
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope without a Broadcaster: build a per-request Broadcaster keyed by
    // the tracking id. (The original nested a second "bc == null" check here whose
    // else-branch was unreachable; that dead code has been removed.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    configureFilter(bc);
    r.setBroadcaster(bc);
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated "new Boolean(true)".
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first one.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            // BUGFIX: apply the same null-fallback to BOTH usages. The original
            // guarded the header value but called contentType.toString()
            // unconditionally on the setAttribute line, risking an NPE.
            String contentTypeValue = contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1";
            b = b.header("Content-Type", contentTypeValue);
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentTypeValue);
        }
        // Jetty 8.0.x (x > 1) and 7.5.4 mis-handle the padding write; skip it there.
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null) {
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
 * Create a {@link ResourceFilter} which contains the information about the
 * annotation being processed.
 * <p/>
 * XXX Need to filter invalid mix of annotation.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke, or null when no
 *         Atmosphere annotation applies (plain Jersey method).
 */
public List<ResourceFilter> create(AbstractMethod am) {
    LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
    Filter f;
    if (logger.isDebugEnabled()) {
        for (Annotation annotation : am.getAnnotations()) {
            logger.debug("AtmosphereFilter processing annotation: {}", annotation);
        }
    }
    if (am.getMethod() == null) {
        return null;
    }
    // A SuspendResponse return type takes precedence over every annotation.
    if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
        list.addLast(new Filter(Action.SUSPEND_RESPONSE));
        return list;
    }
    if (am.isAnnotationPresent(Broadcast.class)) {
        int delay = am.getAnnotation(Broadcast.class).delay();
        Class[] suspendTimeout = am.getAnnotation(Broadcast.class).value();
        if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
            f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
        } else {
            f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, suspendTimeout, null);
        }
        list.addLast(f);
        if (am.isAnnotationPresent(Cluster.class)) {
            suspendTimeout = am.getAnnotation(Cluster.class).value();
            for (Class<ClusterBroadcastFilter> c : suspendTimeout) {
                try {
                    ClusterBroadcastFilter cbf = c.newInstance();
                    InjectorProvider.getInjector().inject(cbf);
                    cbf.setUri(am.getAnnotation(Cluster.class).name());
                    f.addCluster(cbf);
                } catch (Throwable t) {
                    logger.warn("Invalid ClusterBroadcastFilter", t);
                }
            }
        }
    }
    if (am.isAnnotationPresent(Suspend.class)) {
        long suspendTimeout = am.getAnnotation(Suspend.class).period();
        TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
        // Annotation period is normalized to milliseconds.
        suspendTimeout = translateTimeUnit(suspendTimeout, tu);
        Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
        boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
        boolean trackable = false;
        if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
            trackable = true;
        }
        if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
        } else {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
        }
        f.setListeners(am.getAnnotation(Suspend.class).listeners());
        // Suspend-style filters must run before broadcast filters: addFirst.
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Subscribe.class)) {
        boolean trackable = false;
        if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
            trackable = true;
        }
        f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
                false, null, am.getAnnotation(Subscribe.class).value());
        f.setListeners(am.getAnnotation(Subscribe.class).listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Publish.class)) {
        f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
                false, null, am.getAnnotation(Publish.class).value());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Resume.class)) {
        int suspendTimeout = am.getAnnotation(Resume.class).value();
        list.addFirst(new Filter(Action.RESUME, suspendTimeout));
    }
    if (am.isAnnotationPresent(Schedule.class)) {
        int period = am.getAnnotation(Schedule.class).period();
        int waitFor = am.getAnnotation(Schedule.class).waitFor();
        if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
            list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
        } else {
            list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
        }
    }
    // Nothing, normal Jersey application.
    return !list.isEmpty() ? list : null;
}
/**
 * Convert {@code period} expressed in {@code tu} to milliseconds.
 * -1 is the sentinel for "no timeout" and is returned untranslated.
 *
 * The original hand-rolled switch over every TimeUnit constant is exactly
 * {@link TimeUnit#toMillis(long)} ({@code TimeUnit.MILLISECONDS.convert(p, tu)}
 * == {@code tu.toMillis(p)} for every unit, including truncation for
 * micro/nanoseconds), so the standard-library call replaces it. A null
 * {@code tu} throws NPE in both versions.
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    return tu.toMillis(period);
}
}
MergeMethods
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} filed based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
enum Action {
SUSPEND , RESUME , BROADCAST , SUSPEND_RESUME ,
SCHEDULE_RESUME , RESUME_ON_BROADCAST , NONE , SCHEDULE , SUSPEND_RESPONSE ,
SUSPEND_TRACKABLE , SUBSCRIBE , SUBSCRIBE_TRACKABLE , PUBLISH , ASYNCHRONOUS}
private
@Context
HttpServletRequest servletReq;
private
@Context
UriInfo uriInfo;
private boolean useResumeAnnotation = false;
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
public ContainerRequestFilter getRequestFilter() {
return null;
}
public ContainerResponseFilter getResponseFilter() {
return this;
}
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
<<<<<<< MINE
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
=======
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
>>>>>>> YOURS
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
boolean outputJunk(boolean outputJunk) {
boolean webSocketEnabled = false;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
if (upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
webSocketEnabled = true;
break;
}
}
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
if (webSocketEnabled) {
return false;
} else if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return false;
}
return outputJunk;
}
/**
* Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
* based on the annotation the web application has used.
*
* @param request the {@link ContainerRequest}
* @param response the {@link ContainerResponse}
* @return the {@link ContainerResponse}
*/
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
switch (action) {
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
+ X_ATMOSPHERE_TRANSPORT
+ " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
final Object entity = response.getEntity();
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(outputComments);
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case RESUME:
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
// Registers a TrackableResource for this request/response pair: unwraps the
// entity when it is itself a TrackableResource, resolves (or generates) the
// tracking UUID, registers the resource with the TrackableSession, and mirrors
// the id on the response header and the servlet-request attribute.
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
// Class.cast(null) yields null, so an absent entity leaves trackableResource null.
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Replace the wrapper with the real entity to be written to the client.
response.setEntity(trackableResource.entity());
}
// Prefer the client-supplied header, then the resource's own id, else generate one.
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
// Echo the tracking id back to the client and expose it to the framework.
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * Associates the suspended resource with its tracker: the resource itself when
 * the trackable was declared for {@link AtmosphereResource}, otherwise the
 * resource's {@link Broadcaster}.
 *
 * @param trackableResource the previously registered trackable
 * @param r                 the suspended Atmosphere resource
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // isAssignableFrom already yields a boolean; the former "? true : false" was redundant.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Augments the response builder with Atmosphere-related headers: an error
 * header when a WebSocket upgrade is requested but unsupported, plus optional
 * no-cache and CORS headers driven by servlet-request attributes.
 *
 * @param b the builder to augment
 * @return the augmented builder
 * @throws IOException declared for API compatibility
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] tokens = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String token : tokens) {
            if (token != null && token.equalsIgnoreCase(WEBSOCKET_UPGRADE) && !webSocketSupported) {
                b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Expire far in the past, then standard HTTP/1.1 and HTTP/1.0 no-cache directives.
        b = b.header(EXPIRES, "-1")
                .header(CACHE_CONTROL, "no-store, no-cache, must-revalidate")
                .header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
                .header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Marks every resource attached to the given Broadcaster so that it resumes
 * on the next broadcast, by setting RESUME_ON_BROADCAST on each underlying
 * servlet request.
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest req = (HttpServletRequest) resource.getRequest();
        req.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the configured {@link ClusterBroadcastFilter}s and
 * {@link BroadcastFilter}s on the Broadcaster's {@link BroadcasterConfig},
 * doing nothing if filters are already present.
 *
 * @param bc the broadcaster to configure
 * @throws WebApplicationException if {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filters shared across all
     * Broadcasters, as each Broadcaster can have its own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Register only on success: previously addFilter ran outside the
                // try block, so a failed instantiation still registered a null
                // (or the previous iteration's) filter.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Stores the AtmosphereResourceEventListener classes that will be instantiated
// and registered when this filter suspends a resource.
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcasts the response entity using the resource's (or the Broadcastable's)
 * Broadcaster, optionally delayed, and replaces the response entity with the
 * Broadcastable's response message when applicable.
 *
 * @param r     the container response whose entity is broadcast
 * @param ar    the resource whose broadcaster is used by default
 * @param delay -1 to broadcast immediately and wait for completion, 0 to delay
 *              until resume, otherwise the delay in seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // Something went wrong if null.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null) return;
            if (delay == -1) {
                Future<Object> f = b.broadcast(msg);
                if (f == null) return;
                // Wait for the broadcast to complete before writing the response.
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Restore the interrupt status so callers can observe the interruption.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Ensures the Broadcaster has the configured BroadcastFilters installed
// before a broadcast occurs.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resumes a previously suspended resource.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Registers a ClusterBroadcastFilter to be installed on configured Broadcasters.
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Suspends the current response: resolves the Broadcaster to use (annotation
 * scope, session-cached, Broadcastable-supplied, or a private request-scoped
 * one), optionally registers a resume-location candidate, then delegates the
 * actual suspension to {@link #executeSuspend}.
 *
 * @param resumeOnBroadcast resume the connection after the first broadcast
 * @param comments          write streaming padding ("junk") before suspending
 * @param timeout           suspend timeout in milliseconds (-1 = no timeout)
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param bc                the Broadcaster to use, or null to resolve one here
 * @param r                 the Atmosphere resource to suspend
 * @param localScope        the Suspend scope (REQUEST gets a private Broadcaster)
 */
void suspend(boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    URI location = null;
    // Do not add location header if already there.
    if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope with no Broadcaster yet: create a private, request-scoped one.
    // (The former nested "if (bc == null)" was always true under this condition,
    // and its else branch was unreachable; both have been removed.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    r.setBroadcaster(bc);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Performs the suspension itself: installs broadcast filters, links the
 * resource and response to the servlet request (and session when supported),
 * resolves the response content type, optionally writes streaming padding and
 * the entity, then suspends the underlying connection.
 *
 * @param r                 the resource to suspend
 * @param timeout           suspend timeout in milliseconds (-1 = no timeout)
 * @param comments          write streaming padding unless suppressed
 * @param resumeOnBroadcast resume after the first broadcast
 * @param location          optional resume Location header value, may be null
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param flushEntity       write the entity now (executing it first when it is a Callable)
 * @throws WebApplicationException when writing the response fails
 */
void executeSuspend(AtmosphereResource r,
                    long timeout,
                    boolean comments,
                    boolean resumeOnBroadcast,
                    URI location,
                    ContainerRequest request,
                    ContainerResponse response,
                    boolean flushEntity) {
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    configureFilter(r.getBroadcaster());
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            b = b.header("Content-Type", contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1");
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
        }
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                // Emit the Location header only once.
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null && flushEntity) {
            try {
                if (Callable.class.isAssignableFrom(entity.getClass())) {
                    entity = Callable.class.cast(entity).call();
                }
            } catch (Throwable t) {
                // Log the cause too: it was previously dropped, hiding the failure.
                logger.error("Error executing callable {}", entity, t);
                entity = null;
            }
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
/**
 * Legacy suspend variant taking an explicit {@code sessionSupported} flag.
 * NOTE(review): this overload appears to have been retained alongside the
 * {@link #executeSuspend}-based overload during a merge — confirm which
 * callers still use it before consolidating.
 *
 * Resolves the Broadcaster, links the resource and response to the request
 * (and session when supported), writes optional padding and the entity, then
 * suspends the connection.
 *
 * @param sessionSupported  whether HTTP-session support is enabled
 * @param resumeOnBroadcast resume the connection after the first broadcast
 * @param comments          write streaming padding ("junk") before suspending
 * @param timeout           suspend timeout in milliseconds (-1 = no timeout)
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param bc                the Broadcaster to use, or null to resolve one here
 * @param r                 the Atmosphere resource to suspend
 * @param localScope        the Suspend scope (REQUEST gets a private Broadcaster)
 * @throws WebApplicationException when writing the response fails
 */
void suspend(boolean sessionSupported,
             boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    URI location = null;
    // Do not add location header if already there.
    if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope with no Broadcaster yet: create a private, request-scoped one.
    // (The former nested "if (bc == null)" was always true under this condition,
    // and its else branch was unreachable; both have been removed.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    configureFilter(bc);
    r.setBroadcaster(bc);
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            b = b.header("Content-Type", contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1");
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
        }
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                // Emit the Location header only once.
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null) {
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
* Create a {@link ResourceFilter} which contains the information about the
* annotation being processed.
* <p/>
* XXX Need to filter invalid mixes of annotations.
*
* @param am an {@link AbstractMethod}
* @return a List of {@link ResourceFilter} to invoke.
*/
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
// No concrete method (e.g. an abstract sub-resource locator): nothing to filter.
if (am.getMethod() == null) {
return null;
}
// SuspendResponse return types are fully handled by the SUSPEND_RESPONSE action.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
// @Broadcast: broadcast the entity, optionally resuming the connection after.
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
}
// Broadcast filters run last (addLast), after any suspend filter added below.
list.addLast(f);
// @Cluster: attach the declared ClusterBroadcastFilters to the broadcast filter.
if (am.isAnnotationPresent(Cluster.class)) {
broadcastFilter = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
// @Asynchronous: suspend with a header-based topic; waitFor == -1 means
// the broadcast waits for the resource to be suspended first.
if (am.isAnnotationPresent(Asynchronous.class)) {
int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
list.addFirst(f);
}
// @Suspend: suspend the connection; TrackableResource return types get the
// tracking-aware action variant. Suspend filters run first (addFirst).
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
// @Subscribe: suspend against a named topic with a 30s timeout.
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
// @Publish: broadcast to the named topic without suspending.
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
// @Resume: resume a previously suspended connection.
if (am.isAnnotationPresent(Resume.class)) {
useResumeAnnotation = true;
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
// @Schedule: periodic broadcast, optionally resuming on each broadcast.
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Converts a suspend period to milliseconds.
 *
 * @param period the period value; -1 means "suspend forever" and is returned as-is
 * @param tu     the unit of {@code period}
 * @return the period in milliseconds (sub-millisecond units truncate toward zero)
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    // TimeUnit.toMillis covers every case the previous switch enumerated,
    // including the identity conversion for MILLISECONDS.
    return tu.toMillis(period);
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} field based on the annotations the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute keys used to pass suspend/resume state between
// the filter, the servlet layer and the Atmosphere framework.
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The response-filter action derived from the resource method's annotations.
enum Action {
SUSPEND , RESUME , BROADCAST , SUSPEND_RESUME ,
SCHEDULE_RESUME , RESUME_ON_BROADCAST , NONE , SCHEDULE , SUSPEND_RESPONSE ,
SUSPEND_TRACKABLE , SUBSCRIBE , SUBSCRIBE_TRACKABLE , PUBLISH , ASYNCHRONOUS}
// Injected per-request servlet request (Jersey @Context proxy).
private
@Context
HttpServletRequest servletReq;
// Injected request URI information, used to build resume locations.
private
@Context
UriInfo uriInfo;
// Set to true once an @Resume-annotated method has been seen.
private boolean useResumeAnnotation = false;
// Maps resume UUIDs to the suspended resources awaiting an @Resume call.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
// Telescoping convenience constructors; all delegate to the full constructor
// below, supplying defaults (no timeout, no wait, APPLICATION scope,
// comments enabled, no filters, no topic).
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Full constructor: captures the action applied during response filtering,
// plus its timing, scope, padding flag, broadcast filters and topic.
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// No request-side filtering is performed; all work happens on the response side.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// This Filter instance itself post-processes the response (see filter() below).
public ContainerResponseFilter getResponseFilter() {
return this;
}
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
<<<<<<< MINE
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
=======
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
>>>>>>> YOURS
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
/**
 * Decide whether streaming padding ("junk") should be written before suspending.
 * WebSocket upgrades and the JSONP/long-polling transports never receive
 * padding; otherwise the annotation-supplied value is passed through.
 */
boolean outputJunk(boolean outputJunk) {
// Scan the comma-separated Connection header tokens for a WebSocket upgrade.
boolean websocketUpgrade = false;
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] tokens = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String token : tokens) {
if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
websocketUpgrade = true;
break;
}
}
}
if (websocketUpgrade) {
return false;
}
// JSONP and long-polling close after the first message, so padding is pointless.
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return false;
}
return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 *
 * @param request the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
// A mapped exception short-circuits all Atmosphere processing.
if (response.getMappedThrowable() != null) {
return response;
}
// The AtmosphereResource was attached to the request by the framework earlier in the chain.
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
switch (action) {
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
+ X_ATMOSPHERE_TRANSPORT
+ " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
// waitFor == -1 means "wait for at least one resource" before broadcasting.
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
// Suspending path (streaming, long-polling, jsonp, websocket without sub-protocol).
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
// Defer the broadcast of the entity until the connection is actually suspended.
final Object entity = response.getEntity();
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
// One-shot listener: detach after the first suspend event.
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
// Polling (or WebSocket sub-protocol) path: broadcast immediately, no suspend.
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
// Evaluate a lazy (Callable) entity right before writing it out.
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
case SUSPEND_RESPONSE:
// The resource method returned a SuspendResponse carrying its own suspend configuration.
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
// Annotation-driven suspend variants share this path.
outputJunk = outputJunk(outputComments);
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case RESUME:
// Flush any pending entity before resuming the suspended connection.
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
// Session-less mode: the resume UUID is the last path segment (see suspend()).
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
// Prefer the suspended resource stashed on the request, if any.
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
// A Broadcastable entity carries its own broadcaster/message/response triple.
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
// Broadcast `o` every `timeout` seconds after an initial `waitFor` delay.
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Register a {@link TrackableResource} for this exchange before suspending.
 * If the response entity is itself a TrackableResource, its wrapped entity is
 * unwrapped onto the response; otherwise a fresh TrackableResource is created.
 * The tracking id is taken, in order of preference, from the request header,
 * from the resource itself, or freshly generated; it is echoed back in the
 * response header and stashed on the request.
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Replace the wrapper with the entity it carries.
response.setEntity(trackableResource.entity());
}
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
// Echo the tracking id back to the client and make it visible server-side.
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * Attach the live object to the tracked resource after suspension: the
 * AtmosphereResource itself when the trackable's declared type is (a subtype
 * of) AtmosphereResource, otherwise its Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
// isAssignableFrom already yields a boolean; the former `? true : false` was redundant.
boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Add Atmosphere response headers to the builder: a WebSocket error header when
 * an upgrade was requested but is unsupported, no-cache headers, and CORS
 * headers, the last two driven by request attributes set by the framework.
 *
 * @param b the builder to decorate
 * @return the (re-assigned) builder
 * @throws IOException declared for callers; not thrown directly here
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
// Detect a WebSocket upgrade request from the Connection header tokens.
if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
for (String upgrade : e) {
// NOTE(review): unlike outputJunk(), tokens are not trim()ed here — a
// " Upgrade" token with a leading space would not match; confirm intended.
if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
if (!webSocketSupported) {
b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
}
}
}
}
boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
if (injectCacheHeaders) {
// Set to expire far in the past.
b = b.header(EXPIRES, "-1");
// Set standard HTTP/1.1 no-cache headers.
b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
// Set standard HTTP/1.0 no-cache header.
b = b.header(PRAGMA, "no-cache");
}
if (enableAccessControl) {
b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
}
return b;
}
/**
 * Flag every request currently suspended on the given broadcaster to resume
 * after the next broadcast.
 */
void configureResumeOnBroadcast(Broadcaster b) {
for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
HttpServletRequest suspendedRequest = (HttpServletRequest) resource.getRequest();
suspendedRequest.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
}
}
/**
 * Install the annotation-declared {@link ClusterBroadcastFilter}s and
 * {@link BroadcastFilter}s on the broadcaster's config, once. A config that
 * already has filters is left untouched.
 *
 * @throws WebApplicationException if {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
/**
 * Here we can't predict if it's the same set of filter shared across all Broadcaster as
 * Broadcaster can have their own BroadcasterConfig instance.
 */
BroadcasterConfig c = bc.getBroadcasterConfig();
// Already configured
if (c.hasFilters()) {
return;
}
// Always the first one, before any transformation/filtering
for (ClusterBroadcastFilter cbf : clusters) {
cbf.setBroadcaster(bc);
c.addFilter(cbf);
}
if (filters != null) {
for (Class<BroadcastFilter> filter : filters) {
try {
BroadcastFilter f = filter.newInstance();
InjectorProvider.getInjector().inject(f);
// Fix: register only on success. Previously addFilter(f) ran outside the
// try block, so a failed newInstance()/inject() still registered null or
// the filter left over from the previous loop iteration.
c.addFilter(f);
} catch (Throwable t) {
logger.warn("Invalid @BroadcastFilter: " + filter, t);
}
}
}
}
// Store the AtmosphereResourceEventListener classes declared by the annotation;
// they are instantiated per-request inside filter().
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcast the response entity. delay == -1 broadcasts immediately and waits
 * for completion; delay == 0 defers via delayBroadcast; delay > 0 delays by
 * that many seconds. A Broadcastable entity may supply its own broadcaster,
 * message, and the value written back into the response.
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
Object o = r.getEntity();
Broadcaster b = ar.getBroadcaster();
Object msg = o;
String returnMsg = null;
// Something went wrong if null.
if (o instanceof Broadcastable) {
if (((Broadcastable) o).getBroadcaster() != null) {
b = ((Broadcastable) o).getBroadcaster();
}
msg = ((Broadcastable) o).getMessage();
returnMsg = ((Broadcastable) o).getResponseMessage().toString();
}
if (action == Action.RESUME_ON_BROADCAST) {
configureResumeOnBroadcast(b);
}
if (o != null) {
addFilter(b);
try {
r.setEntity(msg);
if (msg == null) return;
if (delay == -1) {
Future<Object> f = b.broadcast(msg);
if (f == null) return;
// NOTE(review): the broadcast result is awaited but its value is unused.
Object t = f.get();
if (o instanceof Broadcastable) {
r.setEntity(returnMsg);
}
} else if (delay == 0) {
b.delayBroadcast(msg);
} else {
b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
}
} catch (InterruptedException ex) {
// NOTE(review): interrupt status is not restored here — consider
// Thread.currentThread().interrupt().
logger.error("broadcast interrupted", ex);
} catch (ExecutionException ex) {
logger.error("execution exception during broadcast", ex);
}
}
}
// Thin wrapper around configureFilter().
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resume the suspended connection, completing the response.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Register a ClusterBroadcastFilter created from an @Cluster annotation
// (installed onto the broadcaster later by configureFilter()).
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Prepare the {@link AtmosphereResource} for suspension: select and normalize
 * the {@link Broadcaster}, optionally mint a resume {@code Location} URI, then
 * delegate the actual suspend to {@link #executeSuspend}.
 *
 * @param resumeOnBroadcast resume the connection after the first broadcast
 * @param comments          emit streaming padding ("junk") before suspending
 * @param timeout           suspend timeout (-1 == no timeout)
 * @param request           the current {@link ContainerRequest}
 * @param response          the current {@link ContainerResponse}
 * @param bc                the Broadcaster to associate, or null to derive one
 * @param r                 the resource to suspend
 * @param localScope        the suspend scope (REQUEST gets a private Broadcaster)
 */
void suspend(boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
URI location = null;
// Do not add location header if already there.
if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
// Session-less resume: publish a UUID-suffixed URI and remember the resource
// so a later RESUME request can look it up by that path segment.
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
// Reuse the broadcaster of the previously suspended resource in this session.
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
// REQUEST scope with no Broadcaster yet: create a per-request one keyed by the
// tracking id. (Fix: the former nested `if (bc == null) ... else ...` was
// collapsed — the else branch was unreachable because the outer condition
// already requires bc == null.)
if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
try {
String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
if (id == null) {
id = UUID.randomUUID().toString();
}
bc = broadcasterFactory.get(id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
}
r.setBroadcaster(bc);
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
}
executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Perform the actual suspension: link the resource and response to the request
 * (and session when enabled), negotiate the Content-Type, write optional
 * streaming padding, flush the entity if requested, then suspend the connection.
 */
void executeSuspend(AtmosphereResource r,
long timeout,
boolean comments,
boolean resumeOnBroadcast,
URI location,
ContainerRequest request,
ContainerResponse response,
boolean flushEntity) {
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
configureFilter(r.getBroadcaster());
if (sessionSupported) {
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first acceptable media type.
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
response.getEntity().getClass(),
response.getEntityType(),
response.getAnnotations(),
l);
if (contentType == null ||
contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
b = b.header("Content-Type", contentType != null ?
contentType.toString() : "text/html; charset=ISO-8859-1");
servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
}
// Specific Jetty versions (8.0.x with x > 1, and exactly 7.5.4) mishandle the
// padding write, so padding is skipped for them.
boolean eclipse362468 = false;
String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
if (serverInfo.indexOf("jetty") != -1) {
String[] jettyVersion = serverInfo.substring(6).split("\\.");
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
|| ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
if (comments && eclipse362468) {
logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
}
}
if (!eclipse362468 && comments && !resumeOnBroadcast) {
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
if (location != null) {
// Location is consumed here so it is not written twice below.
b = b.header(HttpHeaders.LOCATION, location);
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null && flushEntity) {
try {
// A Callable entity is evaluated lazily, right before the flush.
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
} catch (Throwable t) {
logger.error("Error executing callable {}", entity);
entity = null;
}
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
/**
 * Alternate suspend implementation (duplicate overload kept by the merge tool's
 * KeepBothMethods strategy; it takes sessionSupported explicitly instead of
 * reading it from the request and performs the suspend inline rather than via
 * executeSuspend). Selects the Broadcaster, optionally mints a resume Location
 * URI, links resource and response to the request/session, negotiates the
 * Content-Type, writes optional padding, flushes the entity, then suspends.
 */
void suspend(boolean sessionSupported,
boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
URI location = null;
// Do not add location header if already there.
if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
// Session-less resume: publish a UUID-suffixed URI and remember the resource.
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
// Reuse the broadcaster of the previously suspended resource in this session.
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
// NOTE(review): the else branch below is unreachable — the outer condition
// already requires bc == null.
if (bc == null) {
try {
String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
if (id == null){
id = UUID.randomUUID().toString();
}
bc = broadcasterFactory.get(id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
} else {
bc.setScope(Broadcaster.SCOPE.REQUEST);
}
}
configureFilter(bc);
r.setBroadcaster(bc);
if (sessionSupported) {
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first acceptable media type.
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
response.getEntity().getClass(),
response.getEntityType(),
response.getAnnotations(),
l);
if (contentType == null ||
contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
b = b.header("Content-Type", contentType != null ?
contentType.toString() : "text/html; charset=ISO-8859-1");
servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
}
// Specific Jetty versions (8.0.x with x > 1, and exactly 7.5.4) mishandle the
// padding write, so padding is skipped for them.
boolean eclipse362468 = false;
String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
if (serverInfo.indexOf("jetty") != -1) {
String[] jettyVersion = serverInfo.substring(6).split("\\.");
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
|| ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
if (comments && eclipse362468) {
logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
}
}
if (!eclipse362468 && comments && !resumeOnBroadcast) {
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
if (location != null) {
// Location is consumed here so it is not written twice below.
b = b.header(HttpHeaders.LOCATION, location);
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null) {
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
}
/**
 * Create a {@link ResourceFilter} which contains the information about the
 * annotation being processed.
 * <p/>
 * XXX Need to filter invalid mix of annotation.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke, or null when no
 *         Atmosphere annotation applies (normal Jersey method).
 */
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
if (am.getMethod() == null) {
return null;
}
// A SuspendResponse return type takes precedence over all annotations.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
// @Broadcast (optionally combined with @Cluster).
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
}
list.addLast(f);
if (am.isAnnotationPresent(Cluster.class)) {
broadcastFilter = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
// @Asynchronous: waitForResource maps to waitFor == -1.
if (am.isAnnotationPresent(Asynchronous.class)) {
int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
list.addFirst(f);
}
// @Suspend: the period is normalized to milliseconds via translateTimeUnit.
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
// @Subscribe: fixed 30s timeout, waitFor == -1.
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
// @Publish
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
// @Resume: also switches on resume-URI generation for subsequent suspends.
if (am.isAnnotationPresent(Resume.class)) {
useResumeAnnotation = true;
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
// @Schedule
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Convert {@code period} from {@code tu} to milliseconds. The sentinel value
 * -1 ("no timeout") is passed through unchanged. Sub-millisecond units
 * (MICROSECONDS, NANOSECONDS) truncate toward zero, exactly as the former
 * hand-written switch did for every TimeUnit constant.
 */
private long translateTimeUnit(long period, TimeUnit tu) {
if (period == -1) return period;
// TimeUnit.convert covers all seven units; the manual per-unit switch was redundant.
return TimeUnit.MILLISECONDS.convert(period, tu);
}
}
KeepBothMethods
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} filed based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute keys used to hand state between filter passes.
public static final String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public static final String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public static final String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
// NOTE(review): the two keys below are missing the "." separator the other keys
// use — likely unintentional, but the literal value may be relied upon by
// readers elsewhere; do not change without auditing all usages.
public static final String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public static final String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The operation a Filter instance performs, derived from the resource-method annotation.
enum Action {
SUSPEND(), RESUME(), BROADCAST(), SUSPEND_RESUME(), SCHEDULE_RESUME(), RESUME_ON_BROADCAST(), NONE(), SCHEDULE(), SUSPEND_RESPONSE(), SUSPEND_TRACKABLE(), SUBSCRIBE(), SUBSCRIBE_TRACKABLE(), PUBLISH(), ASYNCHRONOUS()
}
// Per-request proxies injected by Jersey.
@Context
private HttpServletRequest servletReq;
@Context
private UriInfo uriInfo;
// Flipped on in filter() when the SUPPORT_LOCATION_HEADER attribute is set.
private boolean useResumeAnnotation = false;
// uuid -> suspended resource; consulted by the RESUME path when sessions are disabled.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates = new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
// Which annotation-driven behaviour this filter performs (see Action).
private final Action action;
// Timeout/delay value; units depend on the action (seconds for SCHEDULE,
// an already-translated period for suspend paths) — see filter().
private final long timeout;
// Initial delay for scheduled broadcasts; -1 means "wait for the resource"
// in ASYNCHRONOUS mode (see waitForResource in filter()).
private final int waitFor;
// Suspend scope: REQUEST, SESSION or APPLICATION.
private final Suspend.SCOPE scope;
// BroadcastFilter classes to install on the resolved Broadcaster.
private final Class<BroadcastFilter>[] filters;
// Event listener classes instantiated and attached when suspending.
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
// Whether streaming padding/comments may be written before suspending.
private final boolean outputComments;
// Cluster filters registered via @Cluster; always installed first by configureFilter().
private final ArrayList<ClusterBroadcastFilter> clusters = new ArrayList<ClusterBroadcastFilter>();
// Broadcaster topic name (or, for @Asynchronous, the header NAME carrying it).
private final String topic;
// Telescoping constructor chain; each overload fills in defaults
// (-1 timeout, 0 waitFor, APPLICATION scope, comments on, no filters/topic)
// and delegates to the full constructor below.
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Full constructor: records everything except listeners (set via setListeners)
// and clusters (added via addCluster).
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This filter only post-processes responses; there is no request-side filtering.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// The Filter itself implements ContainerResponseFilter.
public ContainerResponseFilter getResponseFilter() {
return this;
}
/**
 * Decides whether the connection must be resumed after the next broadcast.
 * JSONP and long-polling cannot keep the connection open across broadcasts,
 * so those transports force a resume regardless of the requested value.
 */
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
    final String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport == null) {
        return resumeOnBroadcast;
    }
    final boolean forcedByTransport = transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT);
    return forcedByTransport || resumeOnBroadcast;
}
/**
 * Decides whether streaming padding ("junk") should be written before the
 * connection is suspended. Padding is suppressed for websocket upgrades and
 * for transports (JSONP, long-polling) that resume on the first message.
 */
boolean outputJunk(boolean outputJunk) {
    boolean webSocketEnabled = false;
    final Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        // The Connection header is a comma-separated token list, e.g. "keep-alive, Upgrade".
        for (String token : connection.nextElement().split(",")) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    final String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
 * Overload of {@link #resumeOnBroadcast(boolean)} that reads the transport
 * from the Jersey {@link ContainerRequest} instead of the servlet request.
 */
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
    final String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport == null) {
        return resumeOnBroadcast;
    }
    return transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT) || resumeOnBroadcast;
}
/**
 * Overload of {@link #outputJunk(boolean)} that reads the transport header
 * from the Jersey {@link ContainerRequest}.
 *
 * NOTE(review): the Connection header is still read from servletReq while the
 * transport comes from the Jersey request — presumably both describe the same
 * underlying request; confirm before unifying the two sources.
 */
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
    boolean webSocketEnabled = false;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        for (String token : ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",")) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    final String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 *
 * @param request the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}, possibly with its entity consumed or rewritten
 */
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
// Never interfere with a request that already failed with a mapped exception.
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r = (AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
switch(action) {
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
// For @Asynchronous, 'topic' holds the header NAME carrying the broadcaster id.
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value " + X_ATMOSPHERE_TRANSPORT + " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
// waitFor == -1 requests an awaitAndBroadcast instead of an immediate broadcast.
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
// Attach the annotation-declared event listeners before suspending.
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
final Object entity = response.getEntity();
// Defer broadcasting the entity until the connection is effectively suspended.
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
// One-shot listener: detach after the first suspend event.
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
// Polling transport or websocket sub-protocol: broadcast immediately, no suspend.
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
// Tracking is on when the attribute is present, or when the entity itself is trackable.
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk, translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(outputComments);
// SUSPEND_RESUME implies resume-on-broadcast by definition.
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response, broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case RESUME:
// Flush any entity to the still-suspended connection before resuming it.
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
// Without sessions the resume target is identified by the trailing path segment (uuid).
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(new IllegalStateException("Unable to retrieve suspended Response. " + "Either session-support is not enabled in atmosphere.xml or the" + "path used to resume is invalid."));
}
break;
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
// waitFor is the initial delay and timeout the period (both seconds) of the fixed broadcast.
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Wraps (or reuses) the response entity as a {@link TrackableResource},
 * selects a tracking id, registers it with the {@link TrackableSession}, and
 * echoes the id back to the client via header and request attribute.
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
    TrackableResource<? extends Trackable> tracked = TrackableResource.class.cast(response.getEntity());
    if (tracked != null) {
        // Unwrap: the real entity goes back on the response.
        response.setEntity(tracked.entity());
    } else {
        tracked = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
    }
    // Prefer the client-supplied id, then the resource's own, then a fresh uuid.
    String uuid = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
    if (uuid == null) {
        uuid = tracked.trackingID() != null ? tracked.trackingID() : UUID.randomUUID().toString();
    }
    tracked.setTrackingID(uuid);
    TrackableSession.getDefault().track(tracked);
    response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, tracked.trackingID());
    servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, tracked.trackingID());
    return tracked;
}
/**
 * Associates the concrete tracked object with its TrackableResource: the
 * resource itself when the declared type is an AtmosphereResource, otherwise
 * the resource's Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // Simplified from the redundant "? true : false" ternary.
    final boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Adds protocol headers to the response builder: a websocket error header when
 * an Upgrade was requested but is unsupported, optional no-cache headers, and
 * optional CORS headers, all driven by servlet request attributes.
 *
 * @param b the builder to augment
 * @return the augmented builder
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String upgrade : e) {
            // trim(): "Connection: keep-alive, Upgrade" splits into tokens with
            // leading spaces, so without trimming the comparison never matched.
            // This now mirrors the token handling in outputJunk().
            if (upgrade != null && upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                if (!webSocketSupported) {
                    b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
                }
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Flags every request currently suspended on the given Broadcaster so that
 * the next broadcast resumes it.
 */
void configureResumeOnBroadcast(Broadcaster b) {
    final Iterator<AtmosphereResource<?, ?>> suspended = b.getAtmosphereResources().iterator();
    while (suspended.hasNext()) {
        ((HttpServletRequest) suspended.next().getRequest()).setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the cluster filters and the annotation-declared BroadcastFilters on
 * the Broadcaster's config, unless filters were already configured.
 *
 * @param bc the target Broadcaster; must not be null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null)
        throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // addFilter moved inside the try: previously a failed
                // newInstance()/inject() still added a null (or the previous
                // iteration's stale) filter to the config.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Records the AtmosphereResourceEventListener classes to instantiate and
// attach when the resource is suspended (see filter()).
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcasts the response entity using the resource's (or the Broadcastable's)
 * {@link Broadcaster}, optionally delaying the broadcast.
 *
 * @param r     the container response whose entity is broadcast
 * @param ar    the suspended resource supplying the default Broadcaster
 * @param delay -1 broadcasts immediately and waits for completion; 0 delays
 *              until the next broadcast; any other value delays that many seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // Something went wrong if null.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null)
                return;
            if (delay == -1) {
                Future<Object> f = b.broadcast(msg);
                if (f == null)
                    return;
                // Block until the broadcast completes, then swap in the caller-facing message.
                Object t = f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Restore the interrupt status so callers can observe it;
            // previously the interruption was silently swallowed.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Installs the configured BroadcastFilters/cluster filters on the Broadcaster.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resumes a previously suspended resource.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Registers a ClusterBroadcastFilter created from an @Cluster annotation;
// installed first by configureFilter().
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Suspends the current resource: resolves which {@link Broadcaster} to use,
 * optionally registers a resume-candidate Location uuid, then delegates the
 * actual suspend/write sequence to {@link #executeSuspend}.
 *
 * @param resumeOnBroadcast true when the connection must resume after the next broadcast
 * @param comments          true when streaming padding/comments must be written
 * @param bc                the Broadcaster to associate; may be null
 * @param r                 the resource being suspended
 * @param localScope        the Suspend scope (REQUEST, SESSION or APPLICATION)
 */
void suspend(boolean resumeOnBroadcast, boolean comments, long timeout, ContainerRequest request, ContainerResponse response, Broadcaster bc, AtmosphereResource<HttpServletRequest, HttpServletResponse> r, Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    URI location = null;
    // Do not add location header if already there.
    if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope without a Broadcaster: create a private, request-scoped one.
    // (The former nested "if (bc == null) ... else ..." was collapsed: the outer
    // condition already guarantees bc == null, so the else branch was unreachable.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    r.setBroadcaster(bc);
    if (resumeOnBroadcast) {
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Performs the actual suspend: wires the resource/response into the request
 * (and session when supported), negotiates the Content-Type, writes optional
 * streaming padding and the entity, then suspends the connection.
 *
 * @param flushEntity when true the entity (resolving a Callable if needed)
 *                    is written to the client before suspending
 */
void executeSuspend(AtmosphereResource r, long timeout, boolean comments, boolean resumeOnBroadcast, URI location, ContainerRequest request, ContainerResponse response, boolean flushEntity) {
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
configureFilter(r.getBroadcaster());
if (sessionSupported) {
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first acceptable media type.
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(response.getEntity().getClass(), response.getEntityType(), response.getAnnotations(), l);
if (contentType == null || contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
// contentType is non-null here: the resolution block above always assigns
// one when an entity exists, so the null-branch of the ternary is defensive.
b = b.header("Content-Type", contentType != null ? contentType.toString() : "text/html; charset=ISO-8859-1");
servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
}
boolean eclipse362468 = false;
String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
if (serverInfo.indexOf("jetty") != -1) {
String[] jettyVersion = serverInfo.substring(6).split("\\.");
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1)) || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
if (comments && eclipse362468) {
logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
}
}
if (!eclipse362468 && comments && !resumeOnBroadcast) {
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
// The Location header (resume uuid) rides along with the padding when present.
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null && flushEntity) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
} catch (Throwable t) {
logger.error("Error executing callable {}", entity);
entity = null;
}
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
// The entity has been written (or deliberately dropped); suspend without an implicit flush.
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
/**
 * Legacy suspend variant (kept by the merge) that receives the session flag
 * explicitly and inlines the Content-Type negotiation, padding and entity
 * writing instead of delegating to executeSuspend.
 *
 * @param sessionSupported  true when HTTP-session support is enabled
 * @param resumeOnBroadcast true when the connection must resume after the next broadcast
 * @param comments          true when streaming padding/comments must be written
 * @param bc                the Broadcaster to associate; may be null
 * @param r                 the resource being suspended
 * @param localScope        the Suspend scope (REQUEST, SESSION or APPLICATION)
 */
void suspend(boolean sessionSupported, boolean resumeOnBroadcast, boolean comments, long timeout, ContainerRequest request, ContainerResponse response, Broadcaster bc, AtmosphereResource<HttpServletRequest, HttpServletResponse> r, Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    URI location = null;
    // Do not add location header if already there.
    if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope without a Broadcaster: create a private, request-scoped one.
    // (The former nested "if (bc == null) ... else ..." was collapsed: the outer
    // condition already guarantees bc == null, so the else branch was unreachable.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    configureFilter(bc);
    r.setBroadcaster(bc);
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    if (resumeOnBroadcast) {
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(response.getEntity().getClass(), response.getEntityType(), response.getAnnotations(), l);
            if (contentType == null || contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            b = b.header("Content-Type", contentType != null ? contentType.toString() : "text/html; charset=ISO-8859-1");
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
        }
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1)) || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null) {
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
* Create a {@link ResourceFilter} which contains the information about the
* annotation being processed.
* <p/>
* XXX Need to filter invalid mix of annotation.
*
* @param am an {@link AbstractMethod}
* @return a List of {@link ResourceFilter} to invoke.
*/
public List<ResourceFilter> create(AbstractMethod am) {
    LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
    Filter f;
    if (logger.isDebugEnabled()) {
        for (Annotation annotation : am.getAnnotations()) {
            logger.debug("AtmosphereFilter processing annotation: {}", annotation);
        }
    }
    if (am.getMethod() == null) {
        return null;
    }
    // A SuspendResponse return type takes precedence over every annotation.
    if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
        list.addLast(new Filter(Action.SUSPEND_RESPONSE));
        return list;
    }
    if (am.isAnnotationPresent(Broadcast.class)) {
        // Cache the annotation instead of re-resolving it for each attribute.
        Broadcast broadcastAnnotation = am.getAnnotation(Broadcast.class);
        int delay = broadcastAnnotation.delay();
        Class[] broadcastFilter = broadcastAnnotation.value();
        if (broadcastAnnotation.resumeOnBroadcast()) {
            f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
        } else {
            f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
        }
        list.addLast(f);
        if (am.isAnnotationPresent(Cluster.class)) {
            Cluster clusterAnnotation = am.getAnnotation(Cluster.class);
            broadcastFilter = clusterAnnotation.value();
            for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
                try {
                    ClusterBroadcastFilter cbf = c.newInstance();
                    InjectorProvider.getInjector().inject(cbf);
                    cbf.setUri(clusterAnnotation.name());
                    f.addCluster(cbf);
                } catch (Throwable t) {
                    // A broken cluster filter is logged and skipped, not fatal.
                    logger.warn("Invalid ClusterBroadcastFilter", t);
                }
            }
        }
    }
    if (am.isAnnotationPresent(Asynchronous.class)) {
        Asynchronous asyncAnnotation = am.getAnnotation(Asynchronous.class);
        int suspendTimeout = asyncAnnotation.period();
        Class[] broadcastFilter = asyncAnnotation.broadcastFilter();
        // waitFor == -1 signals "await a resource before broadcasting".
        boolean wait = asyncAnnotation.waitForResource();
        f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, asyncAnnotation.header());
        f.setListeners(asyncAnnotation.eventListeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Suspend.class)) {
        Suspend suspendAnnotation = am.getAnnotation(Suspend.class);
        long suspendTimeout = suspendAnnotation.period();
        TimeUnit tu = suspendAnnotation.timeUnit();
        // Normalize the annotation's period to milliseconds.
        suspendTimeout = translateTimeUnit(suspendTimeout, tu);
        Suspend.SCOPE scope = suspendAnnotation.scope();
        boolean outputComments = suspendAnnotation.outputComments();
        // TrackableResource return types are routed through the *_TRACKABLE action.
        boolean trackable = TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType());
        if (suspendAnnotation.resumeOnBroadcast()) {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
        } else {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
        }
        f.setListeners(suspendAnnotation.listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Subscribe.class)) {
        Subscribe subscribeAnnotation = am.getAnnotation(Subscribe.class);
        boolean trackable = TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType());
        f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION, false, null, subscribeAnnotation.value());
        f.setListeners(subscribeAnnotation.listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Publish.class)) {
        f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION, false, null, am.getAnnotation(Publish.class).value());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Resume.class)) {
        useResumeAnnotation = true;
        int suspendTimeout = am.getAnnotation(Resume.class).value();
        list.addFirst(new Filter(Action.RESUME, suspendTimeout));
    }
    if (am.isAnnotationPresent(Schedule.class)) {
        Schedule scheduleAnnotation = am.getAnnotation(Schedule.class);
        int period = scheduleAnnotation.period();
        int waitFor = scheduleAnnotation.waitFor();
        if (scheduleAnnotation.resumeOnBroadcast()) {
            list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
        } else {
            list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
        }
    }
    // Nothing, normal Jersey application.
    return list.isEmpty() ? null : list;
}
/**
 * Converts {@code period} expressed in {@code tu} to milliseconds.
 * The value {@code -1} is a "no timeout" sentinel and is returned unchanged.
 *
 * @param period the duration value, or -1 for "never"
 * @param tu the unit {@code period} is expressed in
 * @return the duration in milliseconds, or -1
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    // -1 means "suspend forever" and must not be scaled.
    if (period == -1) {
        return period;
    }
    // Every branch of the original switch performed exactly this conversion
    // (MILLISECONDS.convert(period, MILLISECONDS) == period).
    return TimeUnit.MILLISECONDS.convert(period, tu);
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} field based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute under which the suspended AtmosphereResource is stored.
public static final String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
// Request attribute holding the uuid used to build the Location header for resuming.
public static final String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
// Request attribute exposing the uuid -> resource map of resume candidates.
public static final String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
// NOTE(review): unlike the attributes above, these two names have no '.' separator
// before the suffix — confirm that is intentional before normalizing.
public static final String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public static final String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The suspend/resume/broadcast behavior a Filter instance applies; chosen in create()
// from the annotations present on the resource method.
enum Action {
SUSPEND(), RESUME(), BROADCAST(), SUSPEND_RESUME(), SCHEDULE_RESUME(), RESUME_ON_BROADCAST(), NONE(), SCHEDULE(), SUSPEND_RESPONSE(), SUSPEND_TRACKABLE(), SUBSCRIBE(), SUBSCRIBE_TRACKABLE(), PUBLISH(), ASYNCHRONOUS()
}
@Context
private HttpServletRequest servletReq;
@Context
private UriInfo uriInfo;
// Flipped to true when @Resume is seen or location-header support is enabled per request.
private boolean useResumeAnnotation = false;
// uuid -> suspended resource; consulted by the RESUME action when sessions are disabled.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates = new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
// Behavior selected from the resource-method annotations (see Action).
private final Action action;
// Timeout/period value; converted to milliseconds for @Suspend (see translateTimeUnit),
// but passed as SECONDS to scheduleFixedBroadcast/delayBroadcast for other actions.
private final long timeout;
// -1 means "wait for a resource" (@Asynchronous); also the waitFor argument of scheduled broadcasts.
private final int waitFor;
// Suspend scope; REQUEST causes a per-request Broadcaster to be created.
private final Suspend.SCOPE scope;
// BroadcastFilter classes installed on the Broadcaster before broadcasting.
private final Class<BroadcastFilter>[] filters;
// Listener classes instantiated and attached to the resource at suspend time.
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
// Whether streaming padding ("junk"/comments) may be written when suspending.
private final boolean outputComments;
// Cluster filters registered via @Cluster; always installed ahead of other filters.
private final ArrayList<ClusterBroadcastFilter> clusters = new ArrayList<ClusterBroadcastFilter>();
// Broadcaster name or header name, depending on the action (@Subscribe/@Publish/@Asynchronous).
private final String topic;
// Convenience chain: each shorter form fills in defaults
// (timeout -1 = none, waitFor 0, APPLICATION scope, comments enabled, no filters/topic).
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Canonical constructor: captures the full configuration for this response filter.
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// No request-side filtering is performed; all work happens on the response side.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// This Filter instance is itself the response filter.
public ContainerResponseFilter getResponseFilter() {
return this;
}
/**
 * Decides whether the connection must resume after the first broadcast.
 * JSONP and long-polling transports always do, regardless of what the
 * annotation requested; otherwise the annotation's value wins.
 */
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
    final String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport == null) {
        return resumeOnBroadcast;
    }
    boolean forcesResume = JSONP_TRANSPORT.equals(transport) || LONG_POLLING_TRANSPORT.equals(transport);
    return forcesResume || resumeOnBroadcast;
}
/**
 * Decides whether streaming padding ("junk") should be written.
 * Suppressed for websocket upgrades and for JSONP/long-polling transports;
 * otherwise the caller's preference is returned.
 */
boolean outputJunk(boolean outputJunk) {
    boolean webSocketEnabled = false;
    // Hoisted: the original re-fetched the "Connection" header enumeration twice.
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        // NOTE(review): only the first Connection header value is inspected — confirm
        // multi-valued Connection headers are not expected here.
        String[] tokens = connection.nextElement().split(",");
        for (String token : tokens) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
 * Variant of {@code resumeOnBroadcast(boolean)} that reads the transport header
 * from the Jersey {@link ContainerRequest} instead of the servlet request.
 */
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
    final String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport == null) {
        return resumeOnBroadcast;
    }
    // JSONP and long-polling clients always resume after the first broadcast.
    return JSONP_TRANSPORT.equals(transport) || LONG_POLLING_TRANSPORT.equals(transport) || resumeOnBroadcast;
}
/**
 * Variant of {@code outputJunk(boolean)} that reads the transport header from the
 * Jersey {@link ContainerRequest}. Padding is suppressed for websocket upgrades
 * and JSONP/long-polling transports.
 */
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
    boolean webSocketEnabled = false;
    // NOTE(review): the Connection header is read from the injected servlet request,
    // not from the ContainerRequest parameter — confirm this asymmetry is intentional.
    // Hoisted: the original re-fetched the enumeration twice.
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        String[] tokens = connection.nextElement().split(",");
        for (String token : tokens) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
* Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
* based on the annotation the web application has used.
*
* @param request the {@link ContainerRequest}
* @param response the {@link ContainerResponse}
* @return the {@link ContainerResponse}
*/
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
// Jersey already mapped an exception for this response: nothing to do.
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r = (AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
// NOTE: locals declared in earlier cases (outputJunk, resumeOnBroadcast, c, ...)
// are reused by later cases — the whole switch shares one scope.
switch(action) {
// @Asynchronous: broadcast the entity via the named broadcaster and, for
// non-polling transports, suspend the connection.
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value " + X_ATMOSPHERE_TRANSPORT + " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
// Instantiate and attach the configured event listeners.
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
// Defer the broadcast until the connection is actually suspended; the
// listener removes itself once the broadcast has been issued.
final Object entity = response.getEntity();
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
// Polling transport or websocket sub-protocol: broadcast immediately.
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
// SuspendResponse<?> return type: honor the programmatic suspend configuration.
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk, translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// Annotation-driven suspend variants (@Suspend/@Subscribe).
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(outputComments);
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response, broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// @Resume: write any entity, then locate the suspended resource (via session
// or the trailing uuid path segment) and resume it.
case RESUME:
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
// NOTE(review): message is missing a space between "the" and "path".
throw new WebApplicationException(new IllegalStateException("Unable to retrieve suspended Response. " + "Either session-support is not enabled in atmosphere.xml or the" + "path used to resume is invalid."));
}
break;
// @Broadcast/@Publish/RESUME_ON_BROADCAST: deliver the entity through the
// resource's broadcaster (broadcast() also resumes when required).
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
// @Schedule: broadcast the entity periodically; here timeout is the period in seconds.
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Wraps (or reuses) the response entity as a TrackableResource, assigns it a
 * tracking id and registers it with the global TrackableSession. The id is
 * echoed back via the response header and the servlet request attribute.
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
    TrackableResource<? extends Trackable> tracked = TrackableResource.class.cast(response.getEntity());
    if (tracked == null) {
        tracked = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
    } else {
        // Unwrap: the client should receive the underlying entity, not the wrapper.
        response.setEntity(tracked.entity());
    }
    // Tracking-id precedence: request header, then the resource's own id, then a fresh UUID.
    String uuid = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
    if (uuid == null) {
        uuid = tracked.trackingID() != null ? tracked.trackingID() : UUID.randomUUID().toString();
    }
    tracked.setTrackingID(uuid);
    TrackableSession.getDefault().track(tracked);
    response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, tracked.trackingID());
    servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, tracked.trackingID());
    return tracked;
}
/**
 * Associates the concrete tracked object with the TrackableResource:
 * AtmosphereResource-typed trackables get the resource itself, anything
 * else gets the resource's Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // Direct boolean expression; the original used a redundant "? true : false".
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Applies Atmosphere's standard response headers: a websocket error header when
 * an upgrade was requested but is unsupported, optional no-cache headers, and
 * optional CORS headers.
 *
 * @param b the builder to augment
 * @return the augmented builder
 * @throws IOException declared for interface parity; no I/O is performed here
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    // Hoisted: the original fetched the "Connection" enumeration twice. split() never
    // yields null elements, so the original null-check on each token was dropped.
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        String[] tokens = connection.nextElement().split(",");
        for (String token : tokens) {
            // trim() for parity with outputJunk(): handles "keep-alive, Upgrade" values.
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE) && !webSocketSupported) {
                b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
            }
        }
    }
    // NOTE(review): assumes both attributes are always pre-populated Booleans —
    // unboxing a missing attribute would NPE. Confirm with the framework init path.
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Flags every resource attached to the broadcaster so it resumes on the
 * next broadcast (sets RESUME_ON_BROADCAST on each underlying request).
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (Iterator<AtmosphereResource<?, ?>> it = b.getAtmosphereResources().iterator(); it.hasNext(); ) {
        HttpServletRequest req = (HttpServletRequest) it.next().getRequest();
        req.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the configured cluster filters and BroadcastFilters on the
 * Broadcaster's config, once (no-op if filters are already present).
 *
 * @param bc the target Broadcaster; must not be null
 * @throws WebApplicationException if {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null)
        throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Only register successfully instantiated filters. The original
                // called addFilter() after the catch block, registering null (or a
                // stale instance from a previous iteration) when instantiation failed.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Stores the AtmosphereResourceEventListener classes to instantiate at suspend time.
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcasts the response entity through the resource's Broadcaster.
 * Broadcastable entities may override the Broadcaster and supply separate
 * broadcast/response messages. delay semantics: -1 = broadcast now and wait
 * for completion, 0 = delay indefinitely, &gt;0 = delay that many seconds.
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // Something went wrong if null.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null)
                return;
            if (delay == -1) {
                Future<Object> f = b.broadcast(msg);
                if (f == null)
                    return;
                // Block until the broadcast completes before writing the response
                // (the original assigned the result to an unused local).
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Restore the interrupt status so callers can observe it.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Installs the configured BroadcastFilters/cluster filters on the Broadcaster.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resumes a previously suspended resource.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Registers a ClusterBroadcastFilter; installed ahead of other filters in configureFilter().
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Prepares the resource for suspension: resolves the Broadcaster to use
 * (annotation scope, session cache, Broadcastable entity, or a per-request
 * one), registers a resume-candidate Location uuid when applicable, and
 * finally delegates to {@code executeSuspend}.
 */
void suspend(boolean resumeOnBroadcast, boolean comments, long timeout, ContainerRequest request, ContainerResponse response, Broadcaster bc, AtmosphereResource<HttpServletRequest, HttpServletResponse> r, Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    URI location = null;
    // Do not add location header if already there.
    if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    // Re-suspending within the same session: reuse the cached resource's Broadcaster
    // and detach the stale resource from it.
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope without an explicit Broadcaster: create a per-request one.
    // (The original nested a second redundant "bc == null" check whose else-branch
    // was unreachable; it has been removed.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    r.setBroadcaster(bc);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the wasteful new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Links the resource and container response to the request (and session, when
 * supported), resolves the response content type, optionally writes streaming
 * padding and/or the entity (evaluating Callable entities), then suspends the
 * connection for {@code timeout} via {@code r.suspend(timeout, false)}.
 *
 * @param flushEntity whether a non-null entity should be written before suspending
 * @throws WebApplicationException wrapping any IOException raised while writing
 */
void executeSuspend(AtmosphereResource r, long timeout, boolean comments, boolean resumeOnBroadcast, URI location, ContainerRequest request, ContainerResponse response, boolean flushEntity) {
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    configureFilter(r.getBroadcaster());
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(response.getEntity().getClass(), response.getEntityType(), response.getAnnotations(), l);
            if (contentType == null || contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            // Share one resolved value between the header and the request attribute.
            // The original called contentType.toString() unconditionally for the
            // attribute and threw a NullPointerException when no type was resolved.
            String resolvedContentType = contentType != null ? contentType.toString() : "text/html; charset=ISO-8859-1";
            b = b.header("Content-Type", resolvedContentType);
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, resolvedContentType);
        }
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            // NOTE(review): assumes serverInfo is of the form "jetty/x.y.z" — confirm.
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1)) || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                // Consume the Location header so the entity write below does not repeat it.
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null && flushEntity) {
            try {
                if (Callable.class.isAssignableFrom(entity.getClass())) {
                    entity = Callable.class.cast(entity).call();
                }
            } catch (Throwable t) {
                logger.error("Error executing callable {}", entity);
                entity = null;
            }
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
/**
 * Suspend the connection: pick (or create) the {@link Broadcaster}, register the
 * resource with the request/session, emit an optional resume Location header,
 * write padding and/or the entity, then suspend the resource.
 *
 * @param sessionSupported  whether HTTP-session support is enabled
 * @param resumeOnBroadcast whether the next broadcast resumes the connection
 * @param comments          whether streaming padding may be written
 * @param timeout           suspend period passed to {@code r.suspend}; -1 means no timeout
 * @param request           the Jersey request
 * @param response          the Jersey response
 * @param bc                the Broadcaster to use, or null to derive one
 * @param r                 the resource to suspend
 * @param localScope        the lifetime of the Broadcaster (REQUEST/SESSION/APPLICATION)
 * @throws WebApplicationException wrapping any IOException raised while writing
 */
void suspend(boolean sessionSupported, boolean resumeOnBroadcast, boolean comments, long timeout, ContainerRequest request, ContainerResponse response, Broadcaster bc, AtmosphereResource<HttpServletRequest, HttpServletResponse> r, Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    URI location = null;
    // Do not add location header if already there.
    if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    // Reuse the Broadcaster of a previously suspended resource in the same session.
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // FIX: the original wrapped "if (bc == null) {...} else bc.setScope(...)" inside
    // "if (scope == REQUEST && bc == null)", which made the else branch unreachable.
    // The dead branch is removed; observable behavior is unchanged.
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    configureFilter(bc);
    r.setBroadcaster(bc);
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated new Boolean(true); same attribute value.
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(response.getEntity().getClass(), response.getEntityType(), response.getAnnotations(), l);
            if (contentType == null || contentType.isWildcardType() || contentType.isWildcardSubtype()) {
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
            }
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            // FIX: contentType.toString() was called unconditionally for the request
            // attribute while the header had a null fallback; apply the fallback to both.
            String resolvedContentType = contentType != null ? contentType.toString() : "text/html; charset=ISO-8859-1";
            b = b.header("Content-Type", resolvedContentType);
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, resolvedContentType);
        }
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1)) || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                // Consumed: do not emit the Location header a second time below.
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null) {
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
 * Create the list of {@link ResourceFilter}s which carry the information about the
 * Atmosphere annotations present on the resource method being processed.
 * <p/>
 * The relative order of {@code addFirst}/{@code addLast} calls below determines the
 * order the filters run in and is behavior-critical; do not reorder the blocks.
 * <p/>
 * XXX Need to filter invalid mix of annotation.
 *
 * @param am an {@link AbstractMethod}
 * @return a List of {@link ResourceFilter} to invoke, or null when no Atmosphere
 *         annotation applies (normal Jersey processing)
 */
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
if (am.getMethod() == null) {
return null;
}
// A SuspendResponse return type short-circuits all annotation handling.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
// @Broadcast: broadcast the returned entity, optionally resuming on broadcast.
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
}
list.addLast(f);
// @Cluster on the same method adds cluster-wide broadcast filters to f.
if (am.isAnnotationPresent(Cluster.class)) {
broadcastFilter = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
// @Asynchronous: suspend (unless polling) and broadcast to a per-header topic.
if (am.isAnnotationPresent(Asynchronous.class)) {
int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
// waitForResource maps to waitFor == -1 in the Filter.
boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
list.addFirst(f);
}
// @Suspend: suspend the connection; SUSPEND_TRACKABLE when the return type is trackable.
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
// Normalize the annotation period to milliseconds.
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
// @Subscribe: suspend on a named topic with a fixed 30s period.
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION, false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
// @Publish: broadcast to the named topic without suspending.
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION, false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
// @Resume: resume a previously suspended connection.
if (am.isAnnotationPresent(Resume.class)) {
useResumeAnnotation = true;
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
// @Schedule: periodically broadcast, optionally resuming on broadcast.
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Convert {@code period}, expressed in {@code tu}, into milliseconds.
 * A period of -1 is the "no timeout" sentinel and is returned untouched
 * (converting it would scale the sentinel and change its meaning).
 *
 * @param period the suspend period, or -1 for no timeout
 * @param tu     the unit {@code period} is expressed in
 * @return the period in milliseconds; sub-millisecond units truncate toward zero
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) {
        return period;
    }
    // TimeUnit.convert handles every unit uniformly, replacing the former
    // per-unit switch with identical results (including truncation for
    // MICROSECONDS/NANOSECONDS and identity for MILLISECONDS).
    return TimeUnit.MILLISECONDS.convert(period, tu);
}
}
Safe
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercepts the response and appropriately
* sets the {@link AtmosphereResourceEvent} field based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
// Shared logger for the factory and its inner Filter instances.
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute key under which the suspended AtmosphereResource is stored.
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
// Request attribute key holding the UUID used to build the resume Location header.
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
// Request attribute key exposing the map of resumable resources keyed by UUID.
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
// NOTE(review): unlike the keys above, the two keys below have no "." separator
// before the suffix — presumably unintentional, but changing the strings would
// break existing attribute lookups; confirm before normalizing.
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The annotation-driven behavior a Filter instance applies to a response.
enum Action {
SUSPEND , RESUME , BROADCAST , SUSPEND_RESUME ,
SCHEDULE_RESUME , RESUME_ON_BROADCAST , NONE , SCHEDULE , SUSPEND_RESPONSE ,
SUSPEND_TRACKABLE , SUBSCRIBE , SUBSCRIBE_TRACKABLE , PUBLISH , ASYNCHRONOUS}
// Injected by Jersey: the current servlet request.
private
@Context
HttpServletRequest servletReq;
// Injected by Jersey: URI information for the current request.
private
@Context
UriInfo uriInfo;
// Set when @Resume is used or location-header support is enabled.
private boolean useResumeAnnotation = false;
// Suspended resources awaiting an explicit resume, keyed by generated UUID.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
// Behavior this filter applies (suspend, broadcast, schedule, ...).
private final Action action;
// Suspend/broadcast period; -1 means no timeout.
private final long timeout;
// Delay before a scheduled broadcast; -1 means wait for the resource first.
private final int waitFor;
// Lifetime of the Broadcaster used when suspending (REQUEST/SESSION/APPLICATION).
private final Suspend.SCOPE scope;
// BroadcastFilter classes declared by the annotation; instantiated in configureFilter().
private final Class<BroadcastFilter>[] filters;
// Event listener classes to attach when the request is suspended; set via setListeners().
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
// Whether streaming padding ("junk") may be written before suspending.
private final boolean outputComments;
// Cluster-wide broadcast filters collected from @Cluster.
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
// Broadcaster topic / header name used by @Asynchronous, @Subscribe and @Publish.
private final String topic;
// Telescoping convenience constructors; each delegates to the next with a
// default filled in, all funneling into the canonical constructor below.
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Canonical constructor: assigns every field; filters and topic may be null.
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This factory performs no request-side filtering; all work happens on the response side.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// The Filter instance itself handles the response (see filter() below).
public ContainerResponseFilter getResponseFilter() {
return this;
}
/**
 * Decide whether streaming padding ("junk") must be written for this request.
 * WebSocket upgrades and jsonp/long-polling transports never receive padding;
 * otherwise the caller-supplied default is returned unchanged.
 */
boolean outputJunk(boolean outputJunk) {
    boolean upgradeRequested = false;
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        for (String token : connection.nextElement().split(",")) {
            if (WEBSOCKET_UPGRADE.equalsIgnoreCase(token.trim())) {
                upgradeRequested = true;
                break;
            }
        }
    }
    if (upgradeRequested) {
        return false;
    }
    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (JSONP_TRANSPORT.equals(transport) || LONG_POLLING_TRANSPORT.equals(transport))) {
        return false;
    }
    return outputJunk;
}
<<<<<<< MINE
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
=======
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
>>>>>>> YOURS
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
/**
 * Decide whether streaming padding ("junk") must be written for this request,
 * reading the transport hint from the Jersey {@link ContainerRequest}.
 * WebSocket upgrades and jsonp/long-polling transports never receive padding.
 */
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
    boolean upgradeRequested = false;
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        for (String token : connection.nextElement().split(",")) {
            if (WEBSOCKET_UPGRADE.equalsIgnoreCase(token.trim())) {
                upgradeRequested = true;
                break;
            }
        }
    }
    if (upgradeRequested) {
        return false;
    }
    String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (JSONP_TRANSPORT.equals(transport) || LONG_POLLING_TRANSPORT.equals(transport))) {
        return false;
    }
    return outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 * <p/>
 * Dispatches on the {@link Action} this Filter was built with; each case delegates
 * to the suspend/broadcast/resume helpers of the enclosing class. The case bodies
 * are order-sensitive (attribute writes, broadcaster selection, then suspend).
 *
 * @param request  the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
// Responses already carrying a mapped exception are passed through untouched.
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
switch (action) {
// @Asynchronous: broadcast the entity to a header-named broadcaster, suspending
// first for non-polling transports without a websocket sub-protocol.
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
+ X_ATMOSPHERE_TRANSPORT
+ " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
// waitFor == -1 means "await the resource" before broadcasting.
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
// NOTE(review): the caught Throwable t is not chained into the thrown
// exception here, unlike the identical handler in the SUSPEND cases.
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
// Broadcast the entity only once the connection is actually suspended,
// then detach this one-shot listener.
final Object entity = response.getEntity();
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
// Polling transport or websocket sub-protocol: broadcast immediately, no suspend.
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
// SuspendResponse return type: read the suspend configuration from the entity itself.
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
// Tracking is enabled by default when the attribute is present.
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// All @Suspend/@Subscribe variants share one body; SUSPEND_RESUME additionally
// resumes on the first broadcast.
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(outputComments);
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// @Resume: locate the suspended resource (session or UUID path) and resume it.
case RESUME:
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
// Without sessions the resume target is addressed by the UUID path segment.
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
// Broadcast variants: push the entity to the broadcaster, optionally resuming.
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
// @Schedule: write the entity now, then broadcast on a fixed schedule.
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Register a {@link TrackableResource} for this exchange: reuse the entity when it
 * already is one (unwrapping its payload into the response), otherwise create a
 * fresh one. A tracking id is resolved from the request header, the resource
 * itself, or a random UUID, then advertised via header and request attribute.
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
    TrackableResource<? extends Trackable> tracked = TrackableResource.class.cast(response.getEntity());
    if (tracked == null) {
        tracked = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
    } else {
        // Replace the wrapper with its payload so Jersey serializes the real entity.
        response.setEntity(tracked.entity());
    }
    String uuid = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
    if (uuid == null) {
        uuid = tracked.trackingID() != null ? tracked.trackingID() : UUID.randomUUID().toString();
    }
    tracked.setTrackingID(uuid);
    TrackableSession.getDefault().track(tracked);
    response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, tracked.trackingID());
    servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, tracked.trackingID());
    return tracked;
}
/**
 * Bind the tracked object once the suspend path completed: the resource itself
 * when the trackable's declared type is an AtmosphereResource, otherwise its
 * Broadcaster.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // Idiom fix: the original used the redundant "cond ? true : false" form.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Decorate the response builder with Atmosphere's standard headers: an error
 * header when a WebSocket upgrade was requested but is unsupported, no-cache
 * headers when configured, and CORS allow headers when configured.
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        for (String token : connection.nextElement().split(",")) {
            boolean upgradeRequested = token != null && token.equalsIgnoreCase(WEBSOCKET_UPGRADE);
            if (upgradeRequested && !webSocketSupported) {
                b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Expire far in the past plus HTTP/1.1 and HTTP/1.0 no-cache headers.
        b = b.header(EXPIRES, "-1")
             .header(CACHE_CONTROL, "no-store, no-cache, must-revalidate")
             .header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*")
             .header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Flag every request currently attached to the given Broadcaster so that the
 * next broadcast resumes its connection.
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest suspended = (HttpServletRequest) resource.getRequest();
        suspended.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the cluster filters and the {@code @BroadcastFilter}-declared
 * filters on the given {@link Broadcaster}, unless its configuration already
 * has filters installed.
 *
 * @param bc the broadcaster to configure; must not be null
 * @throws WebApplicationException when {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Register only after successful creation and injection.
                // Previously addFilter ran outside the try block, so a failed
                // instantiation still added a null (or the previous) filter.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
/**
 * Stores the {@link AtmosphereResourceEventListener} classes that will be
 * instantiated and attached to the resource when this filter fires.
 *
 * @param listeners listener classes declared on the annotated resource method
 */
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcasts the entity produced by the resource method.
 *
 * @param r     the container response carrying the entity to broadcast
 * @param ar    the resource whose broadcaster is used by default
 * @param delay -1 broadcasts synchronously, 0 delays until the next broadcast,
 *              any other value delays by that many seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // Something went wrong if null.
    if (o instanceof Broadcastable) {
        // A Broadcastable may carry its own target broadcaster.
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        // NOTE(review): assumes getResponseMessage() never returns null -- confirm.
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null) return;
            if (delay == -1) {
                // Synchronous broadcast: block until delivery completes, then
                // swap in the Broadcastable's response message as the entity.
                Future<Object> f = b.broadcast(msg);
                if (f == null) return;
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Restore the interrupt flag so callers can observe the interruption;
            // the previous code swallowed it.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
/**
 * Applies the configured cluster/broadcast filters to the given broadcaster.
 *
 * @param bc the broadcaster to configure; must not be null
 */
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
/**
 * Resumes a previously suspended resource, completing its response.
 *
 * @param resource the resource to resume
 */
void resume(AtmosphereResource resource) {
resource.resume();
}
/**
 * Registers a cluster filter to be installed on broadcasters configured by
 * this filter.
 *
 * @param f the cluster filter to add
 */
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Suspends the current connection for the annotated resource method.
 * <p>
 * Resolves the {@link Broadcaster} to use (request-scoped, session-cached or
 * supplied by a {@link Broadcastable} entity), optionally registers a resume
 * candidate exposed through the Location header, and finally delegates the
 * actual suspend to {@link #executeSuspend}.
 *
 * @param resumeOnBroadcast true when the connection must resume after the next broadcast
 * @param comments          true when streaming padding/comments must be written
 * @param timeout           suspend timeout in milliseconds, -1 for no timeout
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param bc                the broadcaster to suspend against, may be null
 * @param r                 the underlying Atmosphere resource
 * @param localScope        the scope declared on the {@code @Suspend} annotation
 */
void suspend(boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    URI location = null;
    // Do not add location header if already there.
    if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    // Re-use the broadcaster cached in the session, detaching the previously
    // suspended resource just in case something went wrong.
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    // A Broadcastable entity overrides any previously resolved broadcaster.
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope with no broadcaster yet: create a per-request broadcaster.
    // (The former nested "if (bc == null) ... else ..." was dead code -- the
    // outer condition already guaranteed bc == null.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    r.setBroadcaster(bc);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Performs the actual suspend: links the resource and the response to the
 * request/session, writes streaming padding when required, optionally flushes
 * the entity, then suspends the underlying connection.
 *
 * @param r                 the resource to suspend
 * @param timeout           suspend timeout in milliseconds, -1 for no timeout
 * @param comments          true when streaming padding must be written
 * @param resumeOnBroadcast true when the connection resumes after the next broadcast
 * @param location          optional Location header value (resume candidate URI)
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param flushEntity       true to write the entity (resolving Callable entities) before suspending
 * @throws WebApplicationException wrapping any IOException raised while writing
 */
void executeSuspend(AtmosphereResource r,
                    long timeout,
                    boolean comments,
                    boolean resumeOnBroadcast,
                    URI location,
                    ContainerRequest request,
                    ContainerResponse response,
                    boolean flushEntity) {
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    configureFilter(r.getBroadcaster());
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            // Compute the header value once and reuse it: the previous code
            // dereferenced contentType a second time without the null guard.
            String contentTypeValue = contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1";
            b = b.header("Content-Type", contentTypeValue);
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentTypeValue);
        }
        boolean eclipse362468 = false;
        // NOTE(review): assumes serverInfo looks like "jetty/x.y.z..." -- the
        // substring/split below is fragile for other version formats.
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        // Write streaming padding first so proxies/browsers start processing.
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null && flushEntity) {
            try {
                // Callable entities are resolved lazily, right before the flush.
                if (Callable.class.isAssignableFrom(entity.getClass())) {
                    entity = Callable.class.cast(entity).call();
                }
            } catch (Throwable t) {
                logger.error("Error executing callable {}", entity);
                entity = null;
            }
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
/**
 * Legacy suspend variant that receives {@code sessionSupported} explicitly and
 * performs the whole suspend inline instead of delegating to
 * {@link #executeSuspend}.
 * <p>
 * NOTE(review): this largely duplicates the other {@code suspend} overload plus
 * {@code executeSuspend}; it was kept as a second version during a merge.
 * Consider consolidating the two code paths.
 *
 * @param sessionSupported  true when HTTP sessions are enabled by the framework
 * @param resumeOnBroadcast true when the connection must resume after the next broadcast
 * @param comments          true when streaming padding/comments must be written
 * @param timeout           suspend timeout in milliseconds, -1 for no timeout
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param bc                the broadcaster to suspend against, may be null
 * @param r                 the underlying Atmosphere resource
 * @param localScope        the scope declared on the {@code @Suspend} annotation
 */
void suspend(boolean sessionSupported,
             boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    URI location = null;
    // Do not add location header if already there.
    if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    // Re-use the broadcaster cached in the session, detaching the previously
    // suspended resource just in case something went wrong.
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    // A Broadcastable entity overrides any previously resolved broadcaster.
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    // REQUEST scope with no broadcaster yet: create a per-request broadcaster.
    // (The former nested "if (bc == null) ... else ..." was dead code -- the
    // outer condition already guaranteed bc == null.)
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    configureFilter(bc);
    r.setBroadcaster(bc);
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            // Compute the header value once and reuse it: the previous code
            // dereferenced contentType a second time without the null guard.
            String contentTypeValue = contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1";
            b = b.header("Content-Type", contentTypeValue);
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentTypeValue);
        }
        boolean eclipse362468 = false;
        // NOTE(review): assumes serverInfo looks like "jetty/x.y.z..." -- the
        // substring/split below is fragile for other version formats.
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        // Write streaming padding first so proxies/browsers start processing.
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null) {
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
* Create a {@link ResourceFilter} which contains the information about the
* annotation being processed.
* <p/>
* XXX Need to filter invalid mix of annotation.
*
* @param am an {@link AbstractMethod}
* @return a List of {@link ResourceFilter} to invoke.
*/
// Builds the ResourceFilter chain for one resource method by inspecting its
// Atmosphere annotations. Ordering matters: filters added with addFirst run
// before those added with addLast.
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
// No concrete Java method (e.g. a sub-resource locator): nothing to filter.
if (am.getMethod() == null) {
return null;
}
// A SuspendResponse return type short-circuits all annotation processing.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
// @Broadcast: broadcast the return value, optionally resuming afterwards.
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
}
list.addLast(f);
// @Cluster may only accompany @Broadcast; its filters attach to the same Filter.
if (am.isAnnotationPresent(Cluster.class)) {
broadcastFilter = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
// @Asynchronous: suspend and broadcast through a header-named broadcaster.
if (am.isAnnotationPresent(Asynchronous.class)) {
int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
// waitForResource maps to waitFor == -1 (await delivery before returning).
boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
list.addFirst(f);
}
// @Suspend: park the connection; period is normalized to milliseconds.
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
// A TrackableResource return type switches to the *_TRACKABLE actions.
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
// @Subscribe: suspend against a topic-named broadcaster (30s default timeout).
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
// @Publish: broadcast to the named topic without suspending.
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
// @Resume: resume a previously suspended connection (enables resume candidates).
if (am.isAnnotationPresent(Resume.class)) {
useResumeAnnotation = true;
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
// @Schedule: periodic broadcast, optionally resuming on broadcast.
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Converts a suspend period expressed in {@code tu} into milliseconds.
 *
 * @param period the suspend period; -1 is a sentinel meaning "no timeout"
 * @param tu     the unit the period is expressed in
 * @return the period in milliseconds, or -1 unchanged
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    // -1 must not be scaled: it means "never time out".
    if (period == -1) return period;
    // TimeUnit already converts between all of its units; the previous
    // hand-written switch duplicated exactly this logic case by case.
    return tu.toMillis(period);
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercepts the response and appropriately
* sets the {@link AtmosphereResourceEvent} field based on the annotations the
* application has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
enum Action {
SUSPEND , RESUME , BROADCAST , SUSPEND_RESUME ,
SCHEDULE_RESUME , RESUME_ON_BROADCAST , NONE , SCHEDULE , SUSPEND_RESPONSE ,
SUSPEND_TRACKABLE , SUBSCRIBE , SUBSCRIBE_TRACKABLE , PUBLISH , ASYNCHRONOUS}
private
@Context
HttpServletRequest servletReq;
private
@Context
UriInfo uriInfo;
private boolean useResumeAnnotation = false;
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
// Telescoping constructors: each shorter form delegates to the full
// seven-argument constructor below with sensible defaults.
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
/**
 * Full constructor.
 *
 * @param action         the action this filter performs on the response
 * @param timeout        suspend timeout in milliseconds, -1 for none
 * @param waitFor        -1 to await resource delivery before broadcasting
 * @param scope          broadcaster scope from the annotation
 * @param outputComments true to write streaming padding/comments
 * @param filters        BroadcastFilter classes to install, may be null
 * @param topic          broadcaster topic/header name, may be null
 */
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This filter never inspects the incoming request, only the response.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// The Filter instance itself performs the response-side processing.
public ContainerResponseFilter getResponseFilter() {
return this;
}
/**
 * Decides whether streaming "junk" padding must be written for this request.
 * Padding is never needed for WebSocket upgrades nor for JSONP/long-polling
 * transports; otherwise the caller-supplied default wins.
 *
 * @param outputJunk the default taken from the annotation
 * @return true when padding must be written
 */
boolean outputJunk(boolean outputJunk) {
    boolean webSocketEnabled = false;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] connectionTokens = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String token : connectionTokens) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
<<<<<<< MINE
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
=======
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
>>>>>>> YOURS
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
/**
 * Variant of {@link #outputJunk(boolean)} that reads the transport header
 * from the Jersey {@link ContainerRequest} rather than the raw servlet
 * request. Padding is never needed for WebSocket upgrades nor for
 * JSONP/long-polling transports; otherwise the default wins.
 *
 * @param request    the current Jersey request
 * @param outputJunk the default taken from the annotation
 * @return true when padding must be written
 */
boolean outputJunk(ContainerRequest request, boolean outputJunk) {
    boolean webSocketEnabled = false;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] connectionTokens = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String token : connectionTokens) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
* Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
* based on the annotation the web application has used.
*
* @param request the {@link ContainerRequest}
* @param response the {@link ContainerResponse}
* @return the {@link ContainerResponse}
*/
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
if (response.getMappedThrowable() != null) {
return response;
}
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
switch (action) {
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
+ X_ATMOSPHERE_TRANSPORT
+ " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
final Object entity = response.getEntity();
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(outputComments);
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
case RESUME:
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
/**
 * Registers the response entity with the tracking machinery before the request is suspended.
 * <p>
 * If the entity is already a {@link TrackableResource} it is unwrapped (the real entity is put
 * back on the response); otherwise a fresh {@link TrackableResource} is created. A tracking id is
 * chosen in priority order: client-sent X-Atmosphere-tracking-id header, the resource's existing
 * id, or a new random UUID. The id is echoed back as a response header and stashed on the
 * servlet request for downstream components.
 *
 * @param request  the current Jersey request (source of the tracking-id header)
 * @param response the current Jersey response whose entity may be a TrackableResource
 * @return the tracked resource, never null
 */
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
// Returns null only when the entity itself is null; callers guard with isAssignableFrom first,
// so a non-trackable entity is never cast here.
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Unwrap: the client should see the inner entity, not the TrackableResource wrapper.
response.setEntity(trackableResource.entity());
}
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
// Echo the id so the client can correlate subsequent requests with this suspended resource.
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * Associates the concrete suspended object with a previously registered
 * {@link TrackableResource} (see preTrack).
 *
 * @param trackableResource the tracked wrapper created during preTrack
 * @param r                 the suspended Atmosphere resource
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // AtmosphereResource-typed trackables track the resource itself; anything else
    // tracks its Broadcaster. (The former "? true : false" was redundant.)
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Adds Atmosphere-related HTTP headers to the response builder: a WebSocket error header when
 * the client asked for an upgrade the container cannot honor, optional no-cache headers, and
 * optional CORS headers, both driven by servlet-request attributes.
 *
 * @param b the builder to augment
 * @return the augmented builder
 * @throws IOException declared for interface compatibility
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String upgrade : e) {
            // BUGFIX: tokens from "Connection: keep-alive, Upgrade" carry a leading space, so
            // the previous untrimmed equalsIgnoreCase never matched. Trim first, exactly like
            // outputJunk() does. (split() never yields null elements, so no null check needed.)
            if (upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                if (!webSocketSupported) {
                    b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
                }
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Flags every request currently suspended on the given Broadcaster so that the
 * next broadcast resumes it.
 *
 * @param b the Broadcaster whose attached resources should resume on broadcast
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest suspendedRequest = (HttpServletRequest) resource.getRequest();
        suspendedRequest.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the configured {@link ClusterBroadcastFilter}s and {@link BroadcastFilter}s on the
 * given Broadcaster, unless its BroadcasterConfig already has filters.
 *
 * @param bc the Broadcaster to configure; must not be null
 * @throws WebApplicationException if {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /**
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Cluster filters always come first, before any transformation/filtering.
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // BUGFIX: addFilter used to run outside the try with a loop-shared variable,
                // so a failing newInstance() registered null (first iteration) or the previous
                // filter a second time. Only cleanly-built filters are registered now.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Stores the event-listener classes declared on the annotation; filter() instantiates
// and injects them per request.
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
/**
 * Broadcasts the response entity through the resource's Broadcaster, optionally delayed.
 * <p>
 * A {@link Broadcastable} entity may carry its own Broadcaster and a distinct message/response
 * pair. When {@code delay == -1} the broadcast is synchronous (we wait on the Future); when
 * {@code delay == 0} it is delayed indefinitely; otherwise delayed by {@code delay} seconds.
 *
 * @param r     the container response supplying (and receiving) the entity
 * @param ar    the Atmosphere resource whose Broadcaster is used by default
 * @param delay -1 for immediate synchronous broadcast, 0 for an unbounded delay,
 *              otherwise the delay in seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // Something went wrong if null.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null) return;
            if (delay == -1) {
                Future<Object> f = b.broadcast(msg);
                if (f == null) return;
                // Block until the broadcast completes (result itself is unused).
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // BUGFIX: restore the interrupt flag instead of swallowing it, so callers up the
            // stack can still observe the interruption.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Thin alias kept for readability at call sites; the actual work is in configureFilter.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resumes a previously suspended resource, completing its pending response.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Registers a ClusterBroadcastFilter; configureFilter installs these before any other filter.
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
/**
 * Resolves the Broadcaster the resource should join, prepares resume bookkeeping, and then
 * delegates the actual suspension to executeSuspend.
 *
 * @param resumeOnBroadcast whether the connection resumes after the next broadcast
 * @param comments          whether padding/comments must be written (streaming transports)
 * @param timeout           suspend timeout in milliseconds, -1 for no timeout
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param bc                explicit Broadcaster, or null to derive one
 * @param r                 the Atmosphere resource being suspended
 * @param localScope        the @Suspend scope (REQUEST scope gets a private Broadcaster)
 */
void suspend(boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    URI location = null;
    // Do not add location header if already there.
    if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        // BUGFIX: the former nested "if (bc == null) ... else bc.setScope(...)" re-tested a
        // condition already known true, so the else branch was unreachable dead code. Removed;
        // the reachable behavior is unchanged.
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    r.setBroadcaster(bc);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated allocating new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
/**
 * Performs the actual suspension: links the resource to the servlet request (and session,
 * when supported), negotiates the content type, writes optional streaming padding and the
 * entity, then suspends the underlying connection.
 * <p>
 * NOTE(review): the exact ordering of attribute writes, padding write, entity flush and
 * r.suspend() appears intentional and container-sensitive — do not reorder casually.
 *
 * @param r                 the resource to suspend
 * @param timeout           suspend timeout in milliseconds, -1 for none
 * @param comments          whether streaming padding should be written
 * @param resumeOnBroadcast whether the connection resumes on the next broadcast
 * @param location          optional Location header value for @Resume round-trips, may be null
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param flushEntity       whether a non-null entity is written before suspending
 * @throws WebApplicationException wrapping any IOException raised while writing
 */
void executeSuspend(AtmosphereResource r,
long timeout,
boolean comments,
boolean resumeOnBroadcast,
URI location,
ContainerRequest request,
ContainerResponse response,
boolean flushEntity) {
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
configureFilter(r.getBroadcaster());
if (sessionSupported) {
// Cache the suspended resource so a later @Resume request can locate it via the session.
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first acceptable media type.
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
response.getEntity().getClass(),
response.getEntityType(),
response.getAnnotations(),
l);
// Fall back to octet-stream rather than emitting a wildcard content type.
if (contentType == null ||
contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
b = b.header("Content-Type", contentType != null ?
contentType.toString() : "text/html; charset=ISO-8859-1");
servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
}
boolean eclipse362468 = false;
// Jetty 8.0.>1 and 7.5.4 misbehave with padding; detect from the server-info string.
String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
if (serverInfo.indexOf("jetty") != -1) {
String[] jettyVersion = serverInfo.substring(6).split("\\.");
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
|| ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
if (comments && eclipse362468) {
logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
}
}
if (!eclipse362468 && comments && !resumeOnBroadcast) {
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
// The Location header is written at most once: either with the padding or with the entity.
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null && flushEntity) {
try {
// A Callable entity is evaluated lazily, right before the flush.
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
} catch (Throwable t) {
logger.error("Error executing callable {}", entity);
entity = null;
}
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
// Entity has been flushed (or deliberately dropped); suspend with a cleared response.
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
/**
 * Legacy suspension path kept alongside the newer suspend/executeSuspend pair (this file is a
 * merge result where both method versions were retained). Resolves the Broadcaster, prepares
 * resume bookkeeping, negotiates content type, writes padding/entity and suspends — all inline.
 *
 * @param sessionSupported  whether HTTP-session support is enabled
 * @param resumeOnBroadcast whether the connection resumes after the next broadcast
 * @param comments          whether streaming padding must be written
 * @param timeout           suspend timeout in milliseconds, -1 for none
 * @param request           the current Jersey request
 * @param response          the current Jersey response
 * @param bc                explicit Broadcaster, or null to derive one
 * @param r                 the Atmosphere resource being suspended
 * @param localScope        the @Suspend scope (REQUEST scope gets a private Broadcaster)
 */
void suspend(boolean sessionSupported,
             boolean resumeOnBroadcast,
             boolean comments,
             long timeout,
             ContainerRequest request,
             ContainerResponse response,
             Broadcaster bc,
             AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
             Suspend.SCOPE localScope) {
    // Force the status code to 200 events independently of the value of the entity (null or not)
    if (response.getStatus() == 204) {
        response.setStatus(200);
    }
    BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
            .getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
    URI location = null;
    // Do not add location header if already there.
    if (!sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
        String uuid = UUID.randomUUID().toString();
        location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
        resumeCandidates.put(uuid, r);
        servletReq.setAttribute(RESUME_UUID, uuid);
        servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
    }
    if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
        bc = r.getBroadcaster();
    }
    if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
        AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
                (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
        bc = cached.getBroadcaster();
        // Just in case something went wrong.
        try {
            bc.removeAtmosphereResource(cached);
        } catch (IllegalStateException ex) {
            logger.trace(ex.getMessage(), ex);
        }
    }
    if (response.getEntity() instanceof Broadcastable) {
        Broadcastable b = (Broadcastable) response.getEntity();
        bc = b.getBroadcaster();
        response.setEntity(b.getResponseMessage());
    }
    if (localScope == Suspend.SCOPE.REQUEST && bc == null) {
        // BUGFIX: the former nested "if (bc == null) ... else bc.setScope(...)" re-tested a
        // condition already known true, so the else branch was unreachable dead code. Removed;
        // the reachable behavior is unchanged.
        try {
            String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
            if (id == null) {
                id = UUID.randomUUID().toString();
            }
            bc = broadcasterFactory.get(id);
            bc.setScope(Broadcaster.SCOPE.REQUEST);
        } catch (Exception ex) {
            logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
        }
    }
    configureFilter(bc);
    r.setBroadcaster(bc);
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    if (resumeOnBroadcast) {
        // Boolean.TRUE instead of the deprecated allocating new Boolean(true).
        servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, Boolean.TRUE);
    }
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            b = b.header("Content-Type", contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1");
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
        }
        boolean eclipse362468 = false;
        // Jetty 8.0.>1 and 7.5.4 misbehave with padding; detect from the server-info string.
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            // The Location header is written at most once: with the padding or with the entity.
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null) {
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
* Create a {@link ResourceFilter} which contains the information about the
* annotation being processed.
* <p/>
* XXX Need to filter invalid mix of annotation.
*
* @param am an {@link AbstractMethod}
* @return a List of {@link ResourceFilter} to invoke.
*/
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
// Fields/constructors have no method; nothing to filter.
if (am.getMethod() == null) {
return null;
}
// A SuspendResponse return type short-circuits all annotation handling.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
// NOTE(review): addFirst/addLast ordering below determines filter execution order;
// broadcast filters run last, suspend-style filters are pushed to the front.
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
}
list.addLast(f);
// @Cluster only has an effect when combined with @Broadcast.
if (am.isAnnotationPresent(Cluster.class)) {
broadcastFilter = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
if (am.isAnnotationPresent(Asynchronous.class)) {
int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
// waitFor == -1 signals "wait for the resource" downstream.
f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
// TrackableResource return types switch to the *_TRACKABLE actions.
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
// Subscriptions default to a 30s suspend and resource-wait (-1).
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
if (am.isAnnotationPresent(Resume.class)) {
// Flips the factory-wide flag consumed by suspend()'s Location-header logic.
useResumeAnnotation = true;
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Converts {@code period}, expressed in {@code tu}, into milliseconds.
 * A period of -1 (meaning "suspend forever") is passed through untouched so it is not
 * mangled by unit conversion.
 *
 * @param period the duration value, or -1 for unbounded
 * @param tu     the unit {@code period} is expressed in
 * @return the duration in milliseconds (sub-millisecond units truncate toward zero)
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    // TimeUnit.convert handles every unit uniformly — including the identity MILLISECONDS
    // case and truncation of MICRO/NANOSECONDS — exactly matching the former per-unit switch.
    return TimeUnit.MILLISECONDS.convert(period, tu);
}
}
Unstructured
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercepts the response and appropriately
* sets the {@link AtmosphereResourceEvent} field based on the annotations the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The lifecycle operation a configured Filter instance performs on the response:
// suspend/resume/broadcast variants, scheduled broadcasts, trackable flavors, and pub/sub.
enum Action {
SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH, ASYNCHRONOUS
}
private
@Context
HttpServletRequest servletReq;
private
@Context
UriInfo uriInfo;
private boolean useResumeAnnotation = false;
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
// Telescoping constructors: each shorter form delegates toward the canonical
// 7-argument constructor below, filling in defaults
// (timeout -1, waitFor 0, SCOPE.APPLICATION, outputComments true, no filters/topic).
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Canonical constructor: every other overload funnels here.
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This filter only post-processes responses, so no request-side filter is provided.
public ContainerRequestFilter getRequestFilter() {
    return null;
}
// The Filter itself implements ContainerResponseFilter.
public ContainerResponseFilter getResponseFilter() {
    return this;
}
<<<<<<< MINE
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
=======
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
>>>>>>> YOURS
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
/**
 * Decide whether streaming padding ("junk") should be written before suspending.
 * WebSocket upgrades and single-message transports (JSONP, long-polling) never
 * receive padding; otherwise the caller-supplied preference is honored.
 *
 * @param outputJunk the value requested by the annotation/response
 * @return {@code false} for WebSocket/JSONP/long-polling, else {@code outputJunk}
 */
boolean outputJunk(boolean outputJunk) {
    boolean webSocketEnabled = false;
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        // The Connection header may carry a comma-separated token list.
        for (String token : connection.nextElement().split(",")) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    boolean singleMessageTransport = transport != null
            && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT));
    return singleMessageTransport ? false : outputJunk;
}
/**
 * Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
 * based on the annotation the web application has used.
 * <p/>
 * Dispatches on the {@code action} chosen in {@code create(AbstractMethod)} and
 * drives the suspend/resume/broadcast life cycle for the current request.
 *
 * @param request  the {@link ContainerRequest}
 * @param response the {@link ContainerResponse}
 * @return the {@link ContainerResponse}
 */
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
    // Jersey already mapped an exception for this response: pass it through untouched.
    if (response.getMappedThrowable() != null) {
        return response;
    }
    AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
            (AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
                    .getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
    if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
        useResumeAnnotation = true;
    }
    switch (action) {
        case ASYNCHRONOUS:
            // Force the status code to 200 independently of the value of the entity (null or not).
            if (response.getStatus() == 204) {
                response.setStatus(200);
            }
            // @Asynchronous requires both a transport header and a broadcaster
            // name header (the annotation's header() value is stored in topic).
            String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
            String broadcasterName = servletReq.getHeader(topic);
            if (transport == null || broadcasterName == null) {
                throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
                        + X_ATMOSPHERE_TRANSPORT
                        + " and uuid " + X_ATMOSPHERE_TRACKING_ID));
            }
            String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
            final boolean waitForResource = waitFor == -1 ? true : false;
            final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
            if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
                // Non-polling transport: suspend and broadcast the entity once suspended.
                boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
                final boolean resumeOnBroadcast = resumeOnBroadcast(false);
                for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                    try {
                        AtmosphereResourceEventListener el = listener.newInstance();
                        InjectorProvider.getInjector().inject(el);
                        if (r instanceof AtmosphereEventLifecycle) {
                            r.addEventListener(el);
                        }
                    } catch (Throwable t) {
                        throw new WebApplicationException(
                                new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
                    }
                }
                final Object entity = response.getEntity();
                // Broadcast the entity only once the connection is actually suspended;
                // the listener removes itself after the first onSuspend.
                r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
                    @Override
                    public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
                        try {
                            if (entity != null) {
                                if (waitForResource) {
                                    bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
                                } else {
                                    bcaster.broadcast(entity);
                                }
                            }
                        } finally {
                            event.getResource().removeEventListener(this);
                        }
                    }
                });
                if (resumeOnBroadcast) {
                    servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
                }
                r.setBroadcaster(bcaster);
                executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
            } else {
                // Polling or websocket sub-protocol: broadcast immediately, no suspend.
                Object entity = response.getEntity();
                if (waitForResource) {
                    bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
                } else {
                    bcaster.broadcast(entity);
                }
                if (subProtocol == null) {
                    try {
                        if (Callable.class.isAssignableFrom(entity.getClass())) {
                            entity = Callable.class.cast(entity).call();
                        }
                        response.setEntity(entity);
                        response.write();
                    } catch (Throwable t) {
                        logger.debug("Error running Callable", t);
                        response.setEntity(null);
                    }
                } else {
                    response.setEntity(null);
                }
            }
            break;
        case SUSPEND_RESPONSE:
            // The resource method returned a SuspendResponse carrying its own
            // suspend configuration (period, scope, listeners, broadcaster).
            SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
            boolean outputJunk = outputJunk(s.outputComments());
            boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
            for (AtmosphereResourceEventListener el : s.listeners()) {
                if (r instanceof AtmosphereEventLifecycle) {
                    r.addEventListener(el);
                }
            }
            Broadcaster bc = s.broadcaster();
            if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
                bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
            }
            boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
            // Register our TrackableResource
            boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
            TrackableResource<? extends Trackable> trackableResource = null;
            if (isTracked) {
                trackableResource = preTrack(request, response);
            }
            suspend(resumeOnBroadcast, outputJunk,
                    translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
            // Associate the tracked resource.
            if (isTracked && trackableResource != null) {
                postTrack(trackableResource, r);
            }
            break;
        case SUBSCRIBE_TRACKABLE:
        case SUBSCRIBE:
        case SUSPEND:
        case SUSPEND_TRACKABLE:
        case SUSPEND_RESUME:
            outputJunk = outputJunk(outputComments);
            resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
            for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
                try {
                    AtmosphereResourceEventListener el = listener.newInstance();
                    InjectorProvider.getInjector().inject(el);
                    if (r instanceof AtmosphereEventLifecycle) {
                        ((AtmosphereEventLifecycle) r).addEventListener(el);
                    }
                } catch (Throwable t) {
                    throw new WebApplicationException(
                            new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
                }
            }
            Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
            // @Subscribe
            if (action == Action.SUBSCRIBE) {
                Class<Broadcaster> c = null;
                try {
                    c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                } catch (Throwable e) {
                    throw new IllegalStateException(e.getMessage());
                }
                broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
            }
            // Tracking is enabled by default
            supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
            // Register our TrackableResource
            isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
            if (isTracked) {
                trackableResource = preTrack(request, response);
            } else {
                trackableResource = null;
            }
            suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
                    broadcaster, r, scope);
            // Associate the tracked resource.
            if (isTracked && trackableResource != null) {
                postTrack(trackableResource, r);
            }
            break;
        case RESUME:
            // Flush any entity first, then locate and resume the suspended resource
            // either from the HTTP session or from the uuid-keyed resumeCandidates map.
            if (response.getEntity() != null) {
                try {
                    response.write();
                } catch (IOException ex) {
                    throw new WebApplicationException(ex);
                }
            }
            boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
            if (sessionSupported) {
                r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
            } else {
                String path = response.getContainerRequest().getPath();
                r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
            }
            if (r != null) {
                resume(r);
            } else {
                throw new WebApplicationException(
                        new IllegalStateException("Unable to retrieve suspended Response. " +
                                "Either session-support is not enabled in atmosphere.xml or the" +
                                "path used to resume is invalid."));
            }
            break;
        case BROADCAST:
        case PUBLISH:
        case RESUME_ON_BROADCAST:
            AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
            if (ar != null) {
                r = ar;
            }
            // @Publish resolves its broadcaster by configured class + topic name.
            if (action == Action.PUBLISH) {
                Class<Broadcaster> c = null;
                try {
                    c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
                } catch (Throwable e) {
                    throw new IllegalStateException(e.getMessage());
                }
                r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
            }
            broadcast(response, r, timeout);
            break;
        case SCHEDULE:
        case SCHEDULE_RESUME:
            // Periodic broadcast: timeout holds the period, waitFor the initial delay.
            Object o = response.getEntity();
            Broadcaster b = r.getBroadcaster();
            if (response.getEntity() instanceof Broadcastable) {
                b = ((Broadcastable) response.getEntity()).getBroadcaster();
                o = ((Broadcastable) response.getEntity()).getMessage();
                response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
            }
            if (response.getEntity() != null) {
                try {
                    response.write();
                } catch (IOException ex) {
                    throw new WebApplicationException(ex);
                }
            }
            if (action == Action.SCHEDULE_RESUME) {
                configureResumeOnBroadcast(b);
            }
            b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
            break;
    }
    return response;
}
// Prepare a TrackableResource before suspending: unwrap the entity, pick or mint a
// tracking id, register it with the TrackableSession and echo the id back to the
// client via the X-Atmosphere tracking header.
// NOTE(review): the cast assumes callers only invoke this when the entity is null
// or already a TrackableResource (guarded by isTracked at both call sites);
// a non-trackable entity here would raise ClassCastException.
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
    TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
    if (trackableResource == null) {
        // No entity: synthesize a trackable wrapper keyed on the client-sent id.
        trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
    } else {
        // Unwrap so the client receives the real entity, not the tracker.
        response.setEntity(trackableResource.entity());
    }
    // Id preference order: request header, then the tracker's own id, then random.
    String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
    if (trackableUUID == null && trackableResource.trackingID() != null) {
        trackableUUID = trackableResource.trackingID();
    } else if (trackableUUID == null) {
        trackableUUID = UUID.randomUUID().toString();
    }
    trackableResource.setTrackingID(trackableUUID);
    TrackableSession.getDefault().track(trackableResource);
    response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
    servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
    return trackableResource;
}
/**
 * Associate the suspended resource (or its Broadcaster, depending on the tracker's
 * declared type) with the {@link TrackableResource} created in {@code preTrack}.
 *
 * @param trackableResource the tracker registered before suspending
 * @param r                 the resource that was suspended
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // Idiom fix: the redundant "? true : false" on a boolean expression is removed.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
// Add protocol/caching/CORS headers to the response builder:
// - an error header when the client asks for a WebSocket upgrade the container
//   does not support,
// - no-cache headers when NO_CACHE_HEADERS is enabled,
// - permissive CORS headers when access control is enabled.
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String upgrade : e) {
            // NOTE(review): tokens are not trim()ed here, unlike outputJunk() —
            // " Upgrade" with a leading space would not match; confirm intended.
            if (upgrade != null && upgrade.equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                if (!webSocketSupported) {
                    b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
                }
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Mark every request currently attached to the given broadcaster so the next
 * broadcast resumes its suspended connection.
 *
 * @param b the broadcaster whose attached resources should resume on broadcast
 */
void configureResumeOnBroadcast(Broadcaster b) {
    for (AtmosphereResource<?, ?> resource : b.getAtmosphereResources()) {
        HttpServletRequest attachedRequest = (HttpServletRequest) resource.getRequest();
        attachedRequest.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Install the cluster filters and the annotation-declared {@link BroadcastFilter}s
 * on the broadcaster's configuration, once.
 *
 * @param bc the broadcaster to configure; must not be {@code null}
 * @throws WebApplicationException when {@code bc} is {@code null}
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    /*
     * Here we can't predict if it's the same set of filter shared across all Broadcaster as
     * Broadcaster can have their own BroadcasterConfig instance.
     */
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Always the first one, before any transformation/filtering
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Bug fix: register the filter only after successful creation and
                // injection. Previously addFilter(f) ran outside the try block, so a
                // failed newInstance() registered null (first iteration) or the
                // previous loop's filter a second time.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Record the AtmosphereResourceEventListener classes declared on the annotation;
// they are instantiated per request inside filter().
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
    this.listeners = listeners;
}
/**
 * Broadcast the response entity using the resource's broadcaster (or the one
 * carried by a {@link Broadcastable} entity).
 *
 * @param r     the container response whose entity is the message to broadcast
 * @param ar    the atmosphere resource providing the default broadcaster
 * @param delay {@code -1} broadcast immediately and wait for completion,
 *              {@code 0} delay until resume, {@code >0} delay in seconds
 */
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
    Object o = r.getEntity();
    Broadcaster b = ar.getBroadcaster();
    Object msg = o;
    String returnMsg = null;
    // A Broadcastable entity may override the broadcaster and split the broadcast
    // message from the message returned to the caller.
    if (o instanceof Broadcastable) {
        if (((Broadcastable) o).getBroadcaster() != null) {
            b = ((Broadcastable) o).getBroadcaster();
        }
        msg = ((Broadcastable) o).getMessage();
        returnMsg = ((Broadcastable) o).getResponseMessage().toString();
    }
    if (action == Action.RESUME_ON_BROADCAST) {
        configureResumeOnBroadcast(b);
    }
    if (o != null) {
        addFilter(b);
        try {
            r.setEntity(msg);
            if (msg == null) return;
            if (delay == -1) {
                Future<Object> f = b.broadcast(msg);
                if (f == null) return;
                // Block until the broadcast completes. (The result was previously
                // assigned to an unused local; only the wait matters.)
                f.get();
                if (o instanceof Broadcastable) {
                    r.setEntity(returnMsg);
                }
            } else if (delay == 0) {
                b.delayBroadcast(msg);
            } else {
                b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
            }
        } catch (InterruptedException ex) {
            logger.error("broadcast interrupted", ex);
            // Fix: restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        } catch (ExecutionException ex) {
            logger.error("execution exception during broadcast", ex);
        }
    }
}
// Thin alias kept for readability at broadcast time; delegates to configureFilter.
void addFilter(Broadcaster bc) {
    configureFilter(bc);
}
// Resume a previously suspended connection.
void resume(AtmosphereResource resource) {
    resource.resume();
}
// Register a cluster filter to be installed first in configureFilter().
void addCluster(ClusterBroadcastFilter f) {
    clusters.add(f);
}
void suspend(boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
URI location = null;
// Do not add location header if already there.
if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
if (bc == null) {
try {
String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
<<<<<<< MINE
if (id == null) {
=======
if (id == null){
>>>>>>> YOURS
id = UUID.randomUUID().toString();
}
bc = broadcasterFactory.get(id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
} else {
bc.setScope(Broadcaster.SCOPE.REQUEST);
}
}
r.setBroadcaster(bc);
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
// Perform the actual suspend: link the resource/response into request (and session)
// attributes, negotiate a content type, write padding and/or the entity as
// configured, then suspend the connection for `timeout` ms.
// flushEntity=false is used by ASYNCHRONOUS, where the entity is broadcast from
// the onSuspend listener instead of being written here.
void executeSuspend(AtmosphereResource r,
                    long timeout,
                    boolean comments,
                    boolean resumeOnBroadcast,
                    URI location,
                    ContainerRequest request,
                    ContainerResponse response,
                    boolean flushEntity) {
    boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
    configureFilter(r.getBroadcaster());
    if (sessionSupported) {
        servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
        servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    }
    servletReq.setAttribute(SUSPENDED_RESOURCE, r);
    servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
    logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
    // Set the content-type based on the returned entity.
    try {
        MediaType contentType = response.getMediaType();
        if (contentType == null && response.getEntity() != null) {
            LinkedList<MediaType> l = new LinkedList<MediaType>();
            // Will return the first acceptable media type.
            l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
            contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
                    response.getEntity().getClass(),
                    response.getEntityType(),
                    response.getAnnotations(),
                    l);
            // Fall back to octet-stream; this also guarantees contentType is
            // non-null whenever the entity is non-null (used below).
            if (contentType == null ||
                    contentType.isWildcardType() || contentType.isWildcardSubtype())
                contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
        }
        Object entity = response.getEntity();
        Response.ResponseBuilder b = Response.ok();
        b = configureHeaders(b);
        if (entity != null) {
            b = b.header("Content-Type", contentType != null ?
                    contentType.toString() : "text/html; charset=ISO-8859-1");
            servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
        }
        // Jetty 8.0.2+/7.5.4 choke on the streaming padding; detect and skip it.
        boolean eclipse362468 = false;
        String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
        if (serverInfo.indexOf("jetty") != -1) {
            String[] jettyVersion = serverInfo.substring(6).split("\\.");
            // https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
            eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
                    || ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
            if (comments && eclipse362468) {
                logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
            }
        }
        if (!eclipse362468 && comments && !resumeOnBroadcast) {
            String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
            String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
            if (location != null) {
                // Location is emitted with the padding write; null it so the entity
                // write below does not duplicate the header.
                b = b.header(HttpHeaders.LOCATION, location);
                location = null;
            }
            response.setResponse(b.entity(paddingData).build());
            response.write();
        }
        if (entity != null && flushEntity) {
            try {
                if (Callable.class.isAssignableFrom(entity.getClass())) {
                    entity = Callable.class.cast(entity).call();
                }
            } catch (Throwable t) {
                logger.error("Error executing callable {}", entity);
                entity = null;
            }
            if (location != null) {
                b = b.header(HttpHeaders.LOCATION, location);
            }
            response.setResponse(b.entity(entity).build());
            response.write();
        }
        // The entity has been flushed (or intentionally withheld); suspend now.
        response.setEntity(null);
        r.suspend(timeout, false);
    } catch (IOException ex) {
        throw new WebApplicationException(ex);
    }
}
}
/**
* Create a {@link ResourceFilter} which contains the information about the
* annotation being processed.
* <p/>
* XXX Need to filter invalid mix of annotation.
*
* @param am an {@link AbstractMethod}
* @return a List of {@link ResourceFilter} to invoke.
*/
// Build the chain of Filter instances for a resource method based on its
// Atmosphere annotations. Returns null when no annotation applies (plain Jersey).
public List<ResourceFilter> create(AbstractMethod am) {
    LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
    Filter f;
    if (logger.isDebugEnabled()) {
        for (Annotation annotation : am.getAnnotations()) {
            logger.debug("AtmosphereFilter processing annotation: {}", annotation);
        }
    }
    if (am.getMethod() == null) {
        return null;
    }
    // A SuspendResponse return type carries its own configuration and wins outright.
    if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
        list.addLast(new Filter(Action.SUSPEND_RESPONSE));
        return list;
    }
    if (am.isAnnotationPresent(Broadcast.class)) {
        int delay = am.getAnnotation(Broadcast.class).delay();
        Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
        if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
            f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
        } else {
            f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
        }
        list.addLast(f);
        if (am.isAnnotationPresent(Cluster.class)) {
            broadcastFilter = am.getAnnotation(Cluster.class).value();
            for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
                try {
                    ClusterBroadcastFilter cbf = c.newInstance();
                    InjectorProvider.getInjector().inject(cbf);
                    cbf.setUri(am.getAnnotation(Cluster.class).name());
                    f.addCluster(cbf);
                } catch (Throwable t) {
                    logger.warn("Invalid ClusterBroadcastFilter", t);
                }
            }
        }
    }
    if (am.isAnnotationPresent(Asynchronous.class)) {
        int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
        Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
        // waitForResource is encoded as waitFor == -1 (see the ASYNCHRONOUS case).
        boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
        f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
        f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Suspend.class)) {
        long suspendTimeout = am.getAnnotation(Suspend.class).period();
        TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
        suspendTimeout = translateTimeUnit(suspendTimeout, tu);
        Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
        boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
        boolean trackable = false;
        if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
            trackable = true;
        }
        // NOTE(review): when the return type is trackable, both branches pick
        // SUSPEND_TRACKABLE, so resumeOnBroadcast() on the annotation is ignored
        // for trackable resources — confirm this is intended.
        if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
        } else {
            f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
        }
        f.setListeners(am.getAnnotation(Suspend.class).listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Subscribe.class)) {
        boolean trackable = false;
        if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
            trackable = true;
        }
        f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
                false, null, am.getAnnotation(Subscribe.class).value());
        f.setListeners(am.getAnnotation(Subscribe.class).listeners());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Publish.class)) {
        f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
                false, null, am.getAnnotation(Publish.class).value());
        list.addFirst(f);
    }
    if (am.isAnnotationPresent(Resume.class)) {
        useResumeAnnotation = true;
        int suspendTimeout = am.getAnnotation(Resume.class).value();
        list.addFirst(new Filter(Action.RESUME, suspendTimeout));
    }
    if (am.isAnnotationPresent(Schedule.class)) {
        int period = am.getAnnotation(Schedule.class).period();
        int waitFor = am.getAnnotation(Schedule.class).waitFor();
        if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
            list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
        } else {
            list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
        }
    }
    // Nothing, normal Jersey application.
    return list.size() > 0 ? list : null;
}
/**
 * Convert an annotation-supplied period to milliseconds.
 *
 * @param period the period in {@code tu} units; {@code -1} is the "suspend
 *               forever" sentinel and is returned unchanged
 * @param tu     the unit the annotation declared
 * @return the period in milliseconds (truncating for sub-millisecond units)
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    // The previous hand-written switch duplicated
    // TimeUnit.MILLISECONDS.convert(period, tu) for every constant;
    // TimeUnit.toMillis performs exactly that conversion.
    return tu.toMillis(period);
}
}
/*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 2007-2008 Sun Microsystems, Inc. All rights reserved.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common Development
* and Distribution License("CDDL") (collectively, the "License"). You
* may not use this file except in compliance with the License. You can obtain
* a copy of the License at https://glassfish.dev.java.net/public/CDDL+GPL.html
* or glassfish/bootstrap/legal/LICENSE.txt. See the License for the specific
* language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file at glassfish/bootstrap/legal/LICENSE.txt.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the License
* Header, with the fields enclosed by brackets [] replaced by your own
* identifying information: "Portions Copyrighted [year]
* [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding "[Contributor]
* elects to include this software in this distribution under the [CDDL or GPL
* Version 2] license." If you don't indicate a single choice of license, a
* recipient has the option to distribute your version of this file under
* either the CDDL, the GPL Version 2 or to extend the choice of license to
* its licensees as provided above. However, if you add GPL Version 2 code
* and therefore, elected the GPL Version 2 license, then the option applies
* only if the new code is made subject to such option by the copyright
* holder.
*/
package org.atmosphere.jersey;
import com.sun.jersey.api.JResponseAsResponse;
import com.sun.jersey.api.model.AbstractMethod;
import com.sun.jersey.spi.container.ContainerRequest;
import com.sun.jersey.spi.container.ContainerRequestFilter;
import com.sun.jersey.spi.container.ContainerResponse;
import com.sun.jersey.spi.container.ContainerResponseFilter;
import com.sun.jersey.spi.container.ResourceFilter;
import com.sun.jersey.spi.container.ResourceFilterFactory;
import org.atmosphere.annotation.Asynchronous;
import org.atmosphere.annotation.Broadcast;
import org.atmosphere.annotation.Cluster;
import org.atmosphere.annotation.Publish;
import org.atmosphere.annotation.Resume;
import org.atmosphere.annotation.Schedule;
import org.atmosphere.annotation.Subscribe;
import org.atmosphere.annotation.Suspend;
import org.atmosphere.cpr.ApplicationConfig;
import org.atmosphere.cpr.AtmosphereEventLifecycle;
import org.atmosphere.cpr.AtmosphereResource;
import org.atmosphere.cpr.AtmosphereResourceEvent;
import org.atmosphere.cpr.AtmosphereResourceEventListener;
import org.atmosphere.cpr.AtmosphereResourceEventListenerAdapter;
import org.atmosphere.cpr.AtmosphereResourceImpl;
import org.atmosphere.cpr.BroadcastFilter;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterConfig;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.cpr.ClusterBroadcastFilter;
import org.atmosphere.cpr.FrameworkConfig;
import org.atmosphere.cpr.Trackable;
import org.atmosphere.di.InjectorProvider;
import org.atmosphere.websocket.WebSocket;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.net.URI;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_CREDENTIALS;
import static org.atmosphere.cpr.HeaderConfig.ACCESS_CONTROL_ALLOW_ORIGIN;
import static org.atmosphere.cpr.HeaderConfig.CACHE_CONTROL;
import static org.atmosphere.cpr.HeaderConfig.EXPIRES;
import static org.atmosphere.cpr.HeaderConfig.JSONP_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.LONG_POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.POLLING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.PRAGMA;
import static org.atmosphere.cpr.HeaderConfig.STREAMING_TRANSPORT;
import static org.atmosphere.cpr.HeaderConfig.WEBSOCKET_UPGRADE;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_ERROR;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRACKING_ID;
import static org.atmosphere.cpr.HeaderConfig.X_ATMOSPHERE_TRANSPORT;
/**
* {@link ResourceFilterFactory} which intercept the response and appropriately
* set the {@link AtmosphereResourceEvent} field based on the annotation the application
* has defined.
*
* @author Jeanfrancois Arcand
*/
public class AtmosphereFilter implements ResourceFilterFactory {
private static final Logger logger = LoggerFactory.getLogger(AtmosphereFilter.class);
// Request/session attribute key under which the suspended AtmosphereResource is stored.
public final static String SUSPENDED_RESOURCE = AtmosphereFilter.class.getName() + ".suspendedResource";
// Attribute key for the UUID appended to the Location header used by @Resume lookups.
public final static String RESUME_UUID = AtmosphereFilter.class.getName() + ".uuid";
// Attribute key exposing the uuid -> suspended-resource map (see resumeCandidates).
public final static String RESUME_CANDIDATES = AtmosphereFilter.class.getName() + ".resumeCandidates";
// NOTE(review): these two keys lack the "." separator present in the keys above —
// presumably a wire-compatible legacy key; confirm before normalizing.
public final static String INJECTED_BROADCASTER = AtmosphereFilter.class.getName() + "injectedBroadcaster";
public final static String INJECTED_TRACKABLE = AtmosphereFilter.class.getName() + "injectedTrackable";
// The annotation-driven behavior a Filter instance applies to a response.
enum Action {
    SUSPEND, RESUME, BROADCAST, SUSPEND_RESUME,
    SCHEDULE_RESUME, RESUME_ON_BROADCAST, NONE, SCHEDULE, SUSPEND_RESPONSE,
    SUSPEND_TRACKABLE, SUBSCRIBE, SUBSCRIBE_TRACKABLE, PUBLISH, ASYNCHRONOUS
}
// Current servlet request, injected per request by the JAX-RS runtime.
private
@Context
HttpServletRequest servletReq;
// URI info of the matched resource; used to build the resume Location header.
private
@Context
UriInfo uriInfo;
// Set when @Resume is used (or SUPPORT_LOCATION_HEADER is enabled).
private boolean useResumeAnnotation = false;
// uuid -> suspended resource; consulted when HTTP-session support is disabled.
private final ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>> resumeCandidates =
        new ConcurrentHashMap<String, AtmosphereResource<HttpServletRequest, HttpServletResponse>>();
private class Filter implements ResourceFilter, ContainerResponseFilter {
private final Action action;
private final long timeout;
private final int waitFor;
private final Suspend.SCOPE scope;
private final Class<BroadcastFilter>[] filters;
private Class<? extends AtmosphereResourceEventListener>[] listeners = null;
private final boolean outputComments;
private final ArrayList<ClusterBroadcastFilter> clusters
= new ArrayList<ClusterBroadcastFilter>();
private final String topic;
// Telescoping constructors: each shorter form delegates to the full one below,
// filling in defaults (timeout -1 = no timeout, waitFor 0, APPLICATION scope,
// comments enabled, no broadcast filters, no topic).
protected Filter(Action action) {
this(action, -1);
}
protected Filter(Action action, long timeout) {
this(action, timeout, 0);
}
protected Filter(Action action, long timeout, int waitFor) {
this(action, timeout, waitFor, Suspend.SCOPE.APPLICATION);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope) {
this(action, timeout, waitFor, scope, true);
}
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments) {
this(action, timeout, waitFor, scope, outputComments, null, null);
}
// Canonical constructor: every field is assigned exactly once here.
protected Filter(Action action, long timeout, int waitFor, Suspend.SCOPE scope, boolean outputComments, Class<BroadcastFilter>[] filters, String topic) {
this.action = action;
this.timeout = timeout;
this.scope = scope;
this.outputComments = outputComments;
this.waitFor = waitFor;
this.filters = filters;
this.topic = topic;
}
// This filter only participates on the response side of the Jersey pipeline.
public ContainerRequestFilter getRequestFilter() {
return null;
}
// The Filter instance itself implements ContainerResponseFilter.
public ContainerResponseFilter getResponseFilter() {
return this;
}
<<<<<<< MINE
boolean resumeOnBroadcast(boolean resumeOnBroadcast) {
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
=======
boolean resumeOnBroadcast(ContainerRequest request, boolean resumeOnBroadcast) {
String transport = request.getHeaderValue(X_ATMOSPHERE_TRANSPORT);
>>>>>>> YOURS
if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
return true;
}
return resumeOnBroadcast;
}
/**
 * Decides whether padding ("junk") must be written to the response.
 * WebSocket upgrades, JSONP and long-polling never receive padding;
 * for any other transport the annotation-configured value is honored.
 *
 * @param outputJunk the value configured via annotation
 * @return {@code false} for transports that must not be padded
 */
boolean outputJunk(boolean outputJunk) {
    // A WebSocket handshake advertises itself in the Connection header,
    // e.g. "Connection: keep-alive, Upgrade".
    boolean webSocketEnabled = false;
    Enumeration<String> connection = (Enumeration<String>) servletReq.getHeaders("Connection");
    if (connection != null && connection.hasMoreElements()) {
        for (String token : connection.nextElement().split(",")) {
            if (token.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                webSocketEnabled = true;
                break;
            }
        }
    }
    if (webSocketEnabled) {
        return false;
    }
    String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
    if (transport != null && (transport.equals(JSONP_TRANSPORT) || transport.equals(LONG_POLLING_TRANSPORT))) {
        return false;
    }
    return outputJunk;
}
/**
* Configure the {@link AtmosphereResourceEvent} state (suspend, resume, broadcast)
* based on the annotation the web application has used.
*
* @param request the {@link ContainerRequest}
* @param response the {@link ContainerResponse}
* @return the {@link ContainerResponse}
*/
public ContainerResponse filter(final ContainerRequest request, final ContainerResponse response) {
// If Jersey already mapped an exception, pass the response through untouched.
if (response.getMappedThrowable() != null) {
return response;
}
// The AtmosphereResource was stashed on the servlet request by the Atmosphere framework.
AtmosphereResource<HttpServletRequest, HttpServletResponse> r =
(AtmosphereResource<HttpServletRequest, HttpServletResponse>) servletReq
.getAttribute(FrameworkConfig.ATMOSPHERE_RESOURCE);
if (Boolean.parseBoolean((String) servletReq.getAttribute(ApplicationConfig.SUPPORT_LOCATION_HEADER))) {
useResumeAnnotation = true;
}
// NOTE(review): the cases below share a single scope — locals such as
// outputJunk, resumeOnBroadcast, supportTrackable, isTracked and
// trackableResource are declared in one case and reused in later ones.
switch (action) {
// @Asynchronous: broadcast the entity, suspending first for non-polling transports.
case ASYNCHRONOUS:
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
String transport = servletReq.getHeader(X_ATMOSPHERE_TRANSPORT);
// The @Asynchronous header() value names the request header carrying the broadcaster name.
String broadcasterName = servletReq.getHeader(topic);
if (transport == null || broadcasterName == null) {
throw new WebApplicationException(new IllegalStateException("Must specify transport using header value "
+ X_ATMOSPHERE_TRANSPORT
+ " and uuid " + X_ATMOSPHERE_TRACKING_ID));
}
String subProtocol = (String) servletReq.getAttribute(FrameworkConfig.WEBSOCKET_SUBPROTOCOL);
// waitFor == -1 means "wait for at least one resource before broadcasting".
final boolean waitForResource = waitFor == -1 ? true : false;
final Broadcaster bcaster = BroadcasterFactory.getDefault().lookup(broadcasterName, true);
if (!transport.startsWith(POLLING_TRANSPORT) && subProtocol == null) {
boolean outputJunk = transport.equalsIgnoreCase(STREAMING_TRANSPORT);
final boolean resumeOnBroadcast = resumeOnBroadcast(false);
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener));
}
}
final Object entity = response.getEntity();
// Defer the broadcast until the connection is actually suspended; the
// listener removes itself after the first onSuspend.
r.addEventListener(new AtmosphereResourceEventListenerAdapter() {
@Override
public void onSuspend(AtmosphereResourceEvent<HttpServletRequest, HttpServletResponse> event) {
try {
if (entity != null) {
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
}
} finally {
event.getResource().removeEventListener(this);
}
}
});
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
r.setBroadcaster(bcaster);
executeSuspend(r, timeout, outputJunk, resumeOnBroadcast, null, request, response, false);
} else {
// Polling transport (or websocket sub-protocol): broadcast immediately, no suspend.
Object entity = response.getEntity();
if (waitForResource) {
bcaster.awaitAndBroadcast(entity, 30, TimeUnit.SECONDS);
} else {
bcaster.broadcast(entity);
}
if (subProtocol == null) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
response.setEntity(entity);
response.write();
} catch (Throwable t) {
logger.debug("Error running Callable", t);
response.setEntity(null);
}
} else {
response.setEntity(null);
}
}
break;
// Resource method returned a SuspendResponse: read the suspend configuration from it.
case SUSPEND_RESPONSE:
SuspendResponse<?> s = SuspendResponse.class.cast(JResponseAsResponse.class.cast(response.getResponse()).getJResponse());
boolean outputJunk = outputJunk(s.outputComments());
boolean resumeOnBroadcast = resumeOnBroadcast(s.resumeOnBroadcast());
for (AtmosphereResourceEventListener el : s.listeners()) {
if (r instanceof AtmosphereEventLifecycle) {
r.addEventListener(el);
}
}
Broadcaster bc = s.broadcaster();
if (bc == null && s.scope() != Suspend.SCOPE.REQUEST) {
bc = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
}
boolean supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
boolean isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
TrackableResource<? extends Trackable> trackableResource = null;
if (isTracked) {
trackableResource = preTrack(request, response);
}
suspend(resumeOnBroadcast, outputJunk,
translateTimeUnit(s.period().value(), s.period().timeUnit()), request, response, bc, r, s.scope());
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// Suspend-family annotations: set up listeners, pick a broadcaster, then suspend.
case SUBSCRIBE_TRACKABLE:
case SUBSCRIBE:
case SUSPEND:
case SUSPEND_TRACKABLE:
case SUSPEND_RESUME:
outputJunk = outputJunk(outputComments);
resumeOnBroadcast = resumeOnBroadcast((action == Action.SUSPEND_RESUME));
for (Class<? extends AtmosphereResourceEventListener> listener : listeners) {
try {
AtmosphereResourceEventListener el = listener.newInstance();
InjectorProvider.getInjector().inject(el);
if (r instanceof AtmosphereEventLifecycle) {
((AtmosphereEventLifecycle) r).addEventListener(el);
}
} catch (Throwable t) {
throw new WebApplicationException(
new IllegalStateException("Invalid AtmosphereResourceEventListener " + listener, t));
}
}
Broadcaster broadcaster = (Broadcaster) servletReq.getAttribute(INJECTED_BROADCASTER);
// @Subscribe
if (action == Action.SUBSCRIBE) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
broadcaster = BroadcasterFactory.getDefault().lookup(c, topic, true);
}
// Tracking is enabled by default
supportTrackable = servletReq.getAttribute(ApplicationConfig.SUPPORT_TRACKABLE) != null;
// Register our TrackableResource
isTracked = response.getEntity() != null ? TrackableResource.class.isAssignableFrom(response.getEntity().getClass()) : supportTrackable;
if (isTracked) {
trackableResource = preTrack(request, response);
} else {
trackableResource = null;
}
suspend(resumeOnBroadcast, outputJunk, timeout, request, response,
broadcaster, r, scope);
// Associate the tracked resource.
if (isTracked && trackableResource != null) {
postTrack(trackableResource, r);
}
break;
// @Resume: flush any entity, then resume the previously suspended resource.
case RESUME:
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
if (sessionSupported) {
r = (AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
} else {
// Without sessions the resume target is addressed by the UUID path segment.
String path = response.getContainerRequest().getPath();
r = resumeCandidates.remove(path.substring(path.lastIndexOf("/") + 1));
}
if (r != null) {
resume(r);
} else {
throw new WebApplicationException(
new IllegalStateException("Unable to retrieve suspended Response. " +
"Either session-support is not enabled in atmosphere.xml or the" +
"path used to resume is invalid."));
}
break;
// Broadcast-family annotations: deliver the entity through the broadcaster.
case BROADCAST:
case PUBLISH:
case RESUME_ON_BROADCAST:
AtmosphereResource ar = (AtmosphereResource) servletReq.getAttribute(SUSPENDED_RESOURCE);
if (ar != null) {
r = ar;
}
if (action == Action.PUBLISH) {
Class<Broadcaster> c = null;
try {
c = (Class<Broadcaster>) Class.forName((String) servletReq.getAttribute(ApplicationConfig.BROADCASTER_CLASS));
} catch (Throwable e) {
throw new IllegalStateException(e.getMessage());
}
r.setBroadcaster(BroadcasterFactory.getDefault().lookup(c, topic, true));
}
broadcast(response, r, timeout);
break;
// @Schedule: periodic broadcast, optionally resuming on each delivery.
case SCHEDULE:
case SCHEDULE_RESUME:
Object o = response.getEntity();
Broadcaster b = r.getBroadcaster();
if (response.getEntity() instanceof Broadcastable) {
b = ((Broadcastable) response.getEntity()).getBroadcaster();
o = ((Broadcastable) response.getEntity()).getMessage();
response.setEntity(((Broadcastable) response.getEntity()).getResponseMessage());
}
if (response.getEntity() != null) {
try {
response.write();
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
if (action == Action.SCHEDULE_RESUME) {
configureResumeOnBroadcast(b);
}
b.scheduleFixedBroadcast(o, waitFor, timeout, TimeUnit.SECONDS);
break;
}
return response;
}
// Registers a TrackableResource for this request before suspending:
// unwraps the entity (if it is trackable), assigns/propagates a tracking
// UUID, and echoes it back in the X-Atmosphere tracking header.
TrackableResource preTrack(ContainerRequest request, ContainerResponse response) {
TrackableResource<? extends Trackable> trackableResource = TrackableResource.class.cast(response.getEntity());
if (trackableResource == null) {
// No trackable entity: synthesize one keyed on the client-supplied tracking id.
trackableResource = new TrackableResource<AtmosphereResource>(AtmosphereResource.class, servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID), "");
} else {
// Unwrap so the response body is the inner entity, not the tracker.
response.setEntity(trackableResource.entity());
}
// UUID preference order: request header, then the resource's own id, then a fresh one.
String trackableUUID = request.getHeaderValue(X_ATMOSPHERE_TRACKING_ID);
if (trackableUUID == null && trackableResource.trackingID() != null) {
trackableUUID = trackableResource.trackingID();
} else if (trackableUUID == null) {
trackableUUID = UUID.randomUUID().toString();
}
trackableResource.setTrackingID(trackableUUID);
TrackableSession.getDefault().track(trackableResource);
response.getHttpHeaders().putSingle(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
servletReq.setAttribute(X_ATMOSPHERE_TRACKING_ID, trackableResource.trackingID());
return trackableResource;
}
/**
 * Binds the live resource to its tracker after the suspend completed:
 * stores the {@link AtmosphereResource} itself when the tracker's declared
 * type is a resource, otherwise its {@link Broadcaster}.
 */
void postTrack(TrackableResource trackableResource, AtmosphereResource r) {
    // Simplified the redundant "... ? true : false" ternary.
    boolean isAresource = AtmosphereResource.class.isAssignableFrom(trackableResource.type());
    trackableResource.setResource(isAresource ? r : r.getBroadcaster());
}
/**
 * Adds protocol/caching headers to the response being suspended:
 * an error header when a WebSocket upgrade was requested but is unsupported,
 * no-cache headers, and CORS headers, all driven by request attributes set
 * by the Atmosphere framework.
 */
Response.ResponseBuilder configureHeaders(Response.ResponseBuilder b) throws IOException {
    boolean webSocketSupported = servletReq.getAttribute(WebSocket.WEBSOCKET_SUSPEND) != null;
    if (servletReq.getHeaders("Connection") != null && servletReq.getHeaders("Connection").hasMoreElements()) {
        String[] e = ((Enumeration<String>) servletReq.getHeaders("Connection")).nextElement().split(",");
        for (String upgrade : e) {
            // Fix: trim() the token — "Connection: keep-alive, Upgrade" yields
            // " Upgrade" after split(","), which the untrimmed comparison missed.
            // outputJunk() already trims these same tokens.
            if (upgrade != null && upgrade.trim().equalsIgnoreCase(WEBSOCKET_UPGRADE)) {
                if (!webSocketSupported) {
                    b = b.header(X_ATMOSPHERE_ERROR, "Websocket protocol not supported");
                }
            }
        }
    }
    boolean injectCacheHeaders = (Boolean) servletReq.getAttribute(ApplicationConfig.NO_CACHE_HEADERS);
    boolean enableAccessControl = (Boolean) servletReq.getAttribute(ApplicationConfig.DROP_ACCESS_CONTROL_ALLOW_ORIGIN_HEADER);
    if (injectCacheHeaders) {
        // Set to expire far in the past.
        b = b.header(EXPIRES, "-1");
        // Set standard HTTP/1.1 no-cache headers.
        b = b.header(CACHE_CONTROL, "no-store, no-cache, must-revalidate");
        // Set standard HTTP/1.0 no-cache header.
        b = b.header(PRAGMA, "no-cache");
    }
    if (enableAccessControl) {
        b = b.header(ACCESS_CONTROL_ALLOW_ORIGIN, "*");
        b = b.header(ACCESS_CONTROL_ALLOW_CREDENTIALS, "true");
    }
    return b;
}
/**
 * Flags every request currently suspended on the given broadcaster so that
 * the next broadcast resumes it.
 */
void configureResumeOnBroadcast(Broadcaster b) {
    Iterator<AtmosphereResource<?, ?>> suspended = b.getAtmosphereResources().iterator();
    while (suspended.hasNext()) {
        HttpServletRequest req = (HttpServletRequest) suspended.next().getRequest();
        req.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, true);
    }
}
/**
 * Installs the cluster and broadcast filters declared on the resource method
 * into the broadcaster's configuration. A no-op when the config already has
 * filters (each Broadcaster may own its own BroadcasterConfig, so we cannot
 * assume a shared filter set).
 *
 * @throws WebApplicationException when {@code bc} is null
 */
void configureFilter(Broadcaster bc) {
    if (bc == null) throw new WebApplicationException(new IllegalStateException("Broadcaster cannot be null"));
    BroadcasterConfig c = bc.getBroadcasterConfig();
    // Already configured
    if (c.hasFilters()) {
        return;
    }
    // Cluster filters always run first, before any transformation/filtering.
    for (ClusterBroadcastFilter cbf : clusters) {
        cbf.setBroadcaster(bc);
        c.addFilter(cbf);
    }
    if (filters != null) {
        for (Class<BroadcastFilter> filter : filters) {
            try {
                BroadcastFilter f = filter.newInstance();
                InjectorProvider.getInjector().inject(f);
                // Bug fix: addFilter used to sit outside the try block, so a
                // failed newInstance() still registered null (or the filter
                // created in a previous iteration) with the config.
                c.addFilter(f);
            } catch (Throwable t) {
                logger.warn("Invalid @BroadcastFilter: " + filter, t);
            }
        }
    }
}
// Records the AtmosphereResourceEventListener classes declared on the
// annotation; they are instantiated per-request in filter().
private void setListeners(Class<? extends AtmosphereResourceEventListener>[] listeners) {
this.listeners = listeners;
}
// Broadcasts the response entity through the resource's broadcaster.
// A Broadcastable entity may carry its own broadcaster, a distinct message
// to broadcast, and a distinct message to return to the caller.
// delay semantics: -1 broadcast now and wait for the result; 0 delay
// indefinitely; >0 delay that many seconds.
void broadcast(ContainerResponse r, AtmosphereResource ar, long delay) {
Object o = r.getEntity();
Broadcaster b = ar.getBroadcaster();
Object msg = o;
String returnMsg = null;
// Something went wrong if null.
if (o instanceof Broadcastable) {
if (((Broadcastable) o).getBroadcaster() != null) {
b = ((Broadcastable) o).getBroadcaster();
}
msg = ((Broadcastable) o).getMessage();
returnMsg = ((Broadcastable) o).getResponseMessage().toString();
}
if (action == Action.RESUME_ON_BROADCAST) {
configureResumeOnBroadcast(b);
}
if (o != null) {
addFilter(b);
try {
r.setEntity(msg);
if (msg == null) return;
if (delay == -1) {
// Synchronous broadcast: block on the future so the filtered
// result (or the Broadcastable's response message) is returned.
Future<Object> f = b.broadcast(msg);
if (f == null) return;
Object t = f.get();
if (o instanceof Broadcastable) {
r.setEntity(returnMsg);
}
} else if (delay == 0) {
b.delayBroadcast(msg);
} else {
b.delayBroadcast(msg, delay, TimeUnit.SECONDS);
}
} catch (InterruptedException ex) {
logger.error("broadcast interrupted", ex);
} catch (ExecutionException ex) {
logger.error("execution exception during broadcast", ex);
}
}
}
// Thin alias kept for readability at broadcast call sites.
void addFilter(Broadcaster bc) {
configureFilter(bc);
}
// Resumes a previously suspended connection.
void resume(AtmosphereResource resource) {
resource.resume();
}
// Registers a cluster filter created from the @Cluster annotation.
void addCluster(ClusterBroadcastFilter f) {
clusters.add(f);
}
void suspend(boolean resumeOnBroadcast,
boolean comments,
long timeout,
ContainerRequest request,
ContainerResponse response,
Broadcaster bc,
AtmosphereResource<HttpServletRequest, HttpServletResponse> r,
Suspend.SCOPE localScope) {
// Force the status code to 200 events independently of the value of the entity (null or not)
if (response.getStatus() == 204) {
response.setStatus(200);
}
BroadcasterFactory broadcasterFactory = (BroadcasterFactory) servletReq
.getAttribute(ApplicationConfig.BROADCASTER_FACTORY);
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
URI location = null;
// Do not add location header if already there.
if (useResumeAnnotation && !sessionSupported && !resumeOnBroadcast && response.getHttpHeaders().getFirst("Location") == null) {
String uuid = UUID.randomUUID().toString();
location = uriInfo.getAbsolutePathBuilder().path(uuid).build("");
resumeCandidates.put(uuid, r);
servletReq.setAttribute(RESUME_UUID, uuid);
servletReq.setAttribute(RESUME_CANDIDATES, resumeCandidates);
}
if (bc == null && localScope != Suspend.SCOPE.REQUEST) {
bc = r.getBroadcaster();
}
if (sessionSupported && localScope != Suspend.SCOPE.REQUEST && servletReq.getSession().getAttribute(SUSPENDED_RESOURCE) != null) {
AtmosphereResource<HttpServletRequest, HttpServletResponse> cached =
(AtmosphereResource) servletReq.getSession().getAttribute(SUSPENDED_RESOURCE);
bc = cached.getBroadcaster();
// Just in case something went wrong.
try {
bc.removeAtmosphereResource(cached);
} catch (IllegalStateException ex) {
logger.trace(ex.getMessage(), ex);
}
}
if (response.getEntity() instanceof Broadcastable) {
Broadcastable b = (Broadcastable) response.getEntity();
bc = b.getBroadcaster();
response.setEntity(b.getResponseMessage());
}
if ((localScope == Suspend.SCOPE.REQUEST) && bc == null) {
if (bc == null) {
try {
String id = servletReq.getHeader(X_ATMOSPHERE_TRACKING_ID);
<<<<<<< MINE
if (id == null) {
=======
if (id == null){
>>>>>>> YOURS
id = UUID.randomUUID().toString();
}
bc = broadcasterFactory.get(id);
bc.setScope(Broadcaster.SCOPE.REQUEST);
} catch (Exception ex) {
logger.error("failed to instantiate broadcaster with factory: " + broadcasterFactory, ex);
}
} else {
bc.setScope(Broadcaster.SCOPE.REQUEST);
}
}
r.setBroadcaster(bc);
if (resumeOnBroadcast) {
servletReq.setAttribute(ApplicationConfig.RESUME_ON_BROADCAST, new Boolean(true));
}
executeSuspend(r, timeout, comments, resumeOnBroadcast, location, request, response, true);
}
// Performs the actual suspension: links the resource/response to the request
// (and session), negotiates the content type, writes optional streaming
// padding and the entity, then suspends. The statement order (padding before
// entity, entity before suspend) is significant.
void executeSuspend(AtmosphereResource r,
long timeout,
boolean comments,
boolean resumeOnBroadcast,
URI location,
ContainerRequest request,
ContainerResponse response,
boolean flushEntity) {
boolean sessionSupported = (Boolean) servletReq.getAttribute(FrameworkConfig.SUPPORT_SESSION);
configureFilter(r.getBroadcaster());
if (sessionSupported) {
servletReq.getSession().setAttribute(SUSPENDED_RESOURCE, r);
servletReq.getSession().setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
}
servletReq.setAttribute(SUSPENDED_RESOURCE, r);
servletReq.setAttribute(FrameworkConfig.CONTAINER_RESPONSE, response);
logger.debug("Linked HttpServletRequest {} with ContainerResponse {}", servletReq, response);
// Set the content-type based on the returned entity.
try {
MediaType contentType = response.getMediaType();
if (contentType == null && response.getEntity() != null) {
LinkedList<MediaType> l = new LinkedList<MediaType>();
// Will return the first acceptable media type.
l.add(request.getAcceptableMediaType(new LinkedList<MediaType>()));
contentType = response.getMessageBodyWorkers().getMessageBodyWriterMediaType(
response.getEntity().getClass(),
response.getEntityType(),
response.getAnnotations(),
l);
if (contentType == null ||
contentType.isWildcardType() || contentType.isWildcardSubtype())
contentType = MediaType.APPLICATION_OCTET_STREAM_TYPE;
}
Object entity = response.getEntity();
Response.ResponseBuilder b = Response.ok();
b = configureHeaders(b);
if (entity != null) {
b = b.header("Content-Type", contentType != null ?
contentType.toString() : "text/html; charset=ISO-8859-1");
servletReq.setAttribute(FrameworkConfig.EXPECTED_CONTENT_TYPE, contentType.toString());
}
// Jetty 8.0.2+ and 7.5.4 cannot take the streaming padding; detect those
// versions from the server info string and skip the padding there.
boolean eclipse362468 = false;
String serverInfo = r.getAtmosphereConfig().getServletContext().getServerInfo();
if (serverInfo.indexOf("jetty") != -1) {
String[] jettyVersion = serverInfo.substring(6).split("\\.");
// https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468
eclipse362468 = ((Integer.valueOf(jettyVersion[0]) == 8 && Integer.valueOf(jettyVersion[1]) == 0 && Integer.valueOf(jettyVersion[2]) > 1))
|| ((Integer.valueOf(jettyVersion[0]) == 7 && Integer.valueOf(jettyVersion[1]) == 5 && Integer.valueOf(jettyVersion[2]) == 4));
if (comments && eclipse362468) {
logger.debug("Padding response is disabled to workaround https://bugs.eclipse.org/bugs/show_bug.cgi?id=362468");
}
}
if (!eclipse362468 && comments && !resumeOnBroadcast) {
String padding = (String) servletReq.getAttribute(ApplicationConfig.STREAMING_PADDING_MODE);
String paddingData = AtmosphereResourceImpl.createStreamingPadding(padding);
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
// Location already sent with the padding; do not repeat it below.
location = null;
}
response.setResponse(b.entity(paddingData).build());
response.write();
}
if (entity != null && flushEntity) {
try {
if (Callable.class.isAssignableFrom(entity.getClass())) {
entity = Callable.class.cast(entity).call();
}
} catch (Throwable t) {
logger.error("Error executing callable {}", entity);
entity = null;
}
if (location != null) {
b = b.header(HttpHeaders.LOCATION, location);
}
response.setResponse(b.entity(entity).build());
response.write();
}
response.setEntity(null);
r.suspend(timeout, false);
} catch (IOException ex) {
throw new WebApplicationException(ex);
}
}
}
/**
* Create a {@link ResourceFilter} which contains the information about the
* annotation being processed.
* <p/>
* XXX Need to filter invalid mix of annotation.
*
* @param am an {@link AbstractMethod}
* @return a List of {@link ResourceFilter} to invoke.
*/
// Builds the list of ResourceFilters for a resource method based on which
// Atmosphere annotations it carries. Returns null for plain Jersey methods.
// Ordering matters: @Broadcast filters go last, most others go first.
public List<ResourceFilter> create(AbstractMethod am) {
LinkedList<ResourceFilter> list = new LinkedList<ResourceFilter>();
Filter f;
if (logger.isDebugEnabled()) {
for (Annotation annotation : am.getAnnotations()) {
logger.debug("AtmosphereFilter processing annotation: {}", annotation);
}
}
if (am.getMethod() == null) {
return null;
}
// A SuspendResponse return type short-circuits all annotation handling.
if (SuspendResponse.class.isAssignableFrom(am.getMethod().getReturnType())) {
list.addLast(new Filter(Action.SUSPEND_RESPONSE));
return list;
}
if (am.isAnnotationPresent(Broadcast.class)) {
int delay = am.getAnnotation(Broadcast.class).delay();
Class[] broadcastFilter = am.getAnnotation(Broadcast.class).value();
if (am.getAnnotation(Broadcast.class).resumeOnBroadcast()) {
f = new Filter(Action.RESUME_ON_BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
} else {
f = new Filter(Action.BROADCAST, delay, 0, Suspend.SCOPE.APPLICATION, true, broadcastFilter, null);
}
list.addLast(f);
// @Cluster only applies on top of @Broadcast.
if (am.isAnnotationPresent(Cluster.class)) {
broadcastFilter = am.getAnnotation(Cluster.class).value();
for (Class<ClusterBroadcastFilter> c : broadcastFilter) {
try {
ClusterBroadcastFilter cbf = c.newInstance();
InjectorProvider.getInjector().inject(cbf);
cbf.setUri(am.getAnnotation(Cluster.class).name());
f.addCluster(cbf);
} catch (Throwable t) {
logger.warn("Invalid ClusterBroadcastFilter", t);
}
}
}
}
if (am.isAnnotationPresent(Asynchronous.class)) {
int suspendTimeout = am.getAnnotation(Asynchronous.class).period();
Class[] broadcastFilter = am.getAnnotation(Asynchronous.class).broadcastFilter();
boolean wait = am.getAnnotation(Asynchronous.class).waitForResource();
// waitFor -1 encodes "wait for the resource" (see filter()'s ASYNCHRONOUS case).
f = new Filter(Action.ASYNCHRONOUS, suspendTimeout, wait ? -1 : 0, null, false, broadcastFilter, am.getAnnotation(Asynchronous.class).header());
f.setListeners(am.getAnnotation(Asynchronous.class).eventListeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Suspend.class)) {
long suspendTimeout = am.getAnnotation(Suspend.class).period();
TimeUnit tu = am.getAnnotation(Suspend.class).timeUnit();
suspendTimeout = translateTimeUnit(suspendTimeout, tu);
Suspend.SCOPE scope = am.getAnnotation(Suspend.class).scope();
boolean outputComments = am.getAnnotation(Suspend.class).outputComments();
boolean trackable = false;
// A TrackableResource return type upgrades the action to its *_TRACKABLE variant.
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
if (am.getAnnotation(Suspend.class).resumeOnBroadcast()) {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND_RESUME, suspendTimeout, 0, scope, outputComments);
} else {
f = new Filter(trackable ? Action.SUSPEND_TRACKABLE : Action.SUSPEND, suspendTimeout, 0, scope, outputComments);
}
f.setListeners(am.getAnnotation(Suspend.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Subscribe.class)) {
boolean trackable = false;
if (TrackableResource.class.isAssignableFrom(am.getMethod().getReturnType())) {
trackable = true;
}
f = new Filter(trackable ? Action.SUBSCRIBE_TRACKABLE : Action.SUBSCRIBE, 30000, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Subscribe.class).value());
f.setListeners(am.getAnnotation(Subscribe.class).listeners());
list.addFirst(f);
}
if (am.isAnnotationPresent(Publish.class)) {
f = new Filter(Action.PUBLISH, -1, -1, Suspend.SCOPE.APPLICATION,
false, null, am.getAnnotation(Publish.class).value());
list.addFirst(f);
}
if (am.isAnnotationPresent(Resume.class)) {
useResumeAnnotation = true;
int suspendTimeout = am.getAnnotation(Resume.class).value();
list.addFirst(new Filter(Action.RESUME, suspendTimeout));
}
if (am.isAnnotationPresent(Schedule.class)) {
int period = am.getAnnotation(Schedule.class).period();
int waitFor = am.getAnnotation(Schedule.class).waitFor();
if (am.getAnnotation(Schedule.class).resumeOnBroadcast()) {
list.addFirst(new Filter(Action.SCHEDULE_RESUME, period, waitFor));
} else {
list.addFirst(new Filter(Action.SCHEDULE, period, waitFor));
}
}
// Nothing, normal Jersey application.
return list.size() > 0 ? list : null;
}
/**
 * Converts a suspend period expressed in {@code tu} to milliseconds.
 * A period of -1 (suspend indefinitely) is passed through untouched.
 *
 * @param period the annotated period, or -1 for "forever"
 * @param tu     the unit the period was declared in
 * @return the period in milliseconds, or -1
 */
private long translateTimeUnit(long period, TimeUnit tu) {
    if (period == -1) return period;
    // TimeUnit already converts between any pair of units (truncating toward
    // zero for sub-millisecond units), so the previous hand-written switch
    // over every TimeUnit constant was redundant.
    return TimeUnit.MILLISECONDS.convert(period, tu);
}
}
Diff Result
No diff
Case 2 - cassandra.rev_0f1fb_8b0e1.ColumnIndex.java
public Builder
Left modified signature and body. Also overloaded constructor.
Right modified signature and body
Unstructured reported conflict between signatures. Also reported conflict involving bodies and overloaded constructor.
Safe reported conflict on body of overloaded constructor
MergeMethods reported conflict on body of overloaded constructor
KeepBothMethods kept all three versions of constructor
Base
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
import org.apache.cassandra.utils.AlwaysPresentFilter;
import org.apache.cassandra.utils.IFilter;
import org.apache.cassandra.utils.FilterFactory;
public class ColumnIndex
{
public final List<IndexHelper.IndexInfo> columnsIndex;
public final IFilter bloomFilter;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList(), new AlwaysPresentFilter());
private ColumnIndex(int estimatedColumnCount)
{
this(new ArrayList<IndexHelper.IndexInfo>(), FilterFactory.getFilter(estimatedColumnCount, 4, false));
}
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex, IFilter bloomFilter)
{
this.columnsIndex = columnsIndex;
this.bloomFilter = bloomFilter;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder
{
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private final OnDiskAtom.Serializer atomSerializer;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output)
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (IColumn c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
/**
 * Appends one atom (column or range tombstone) to the row: updates the Bloom
 * filter, opens a new index block if needed, serializes the atom when an
 * output is present, and closes the current block once it reaches the
 * configured column index size.
 *
 * @param column the atom to append; atoms must be added in comparator order
 * @throws IOException if writing to the underlying output fails
 */
public void add(OnDiskAtom column) throws IOException
{
    atomCount++;
    // Only real columns go into the Bloom filter; tombstone markers are not looked up by name.
    if (column instanceof IColumn)
        result.bloomFilter.add(column.name());
    if (firstColumn == null)
    {
        // Starting a new index block.
        firstColumn = column;
        startPosition = endPosition;
        // TODO: have that use the firstColumn as min + make sure we
        // optimize that on read
        // Re-open any range tombstones still in effect so the block can be read standalone.
        endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
        blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
                       // where we wouldn't make any progress because a block is filled by said marker
    }
    long size = column.serializedSizeForSSTable();
    endPosition += size;
    blockSize += size;
    // if we hit the column index size that we have to index after, go ahead and index it.
    if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
    {
        IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
        result.columnsIndex.add(cIndexInfo);
        firstColumn = null; // the next add() starts a fresh block
        lastBlockClosing = column;
    }
    // output may be null when we are only computing the index, not writing data.
    if (output != null)
        atomSerializer.serializeForSSTable(column, output);
    // TODO: Should deal with removing unneeded tombstones
    tombstoneTracker.update(column);
    lastColumn = column;
}
/**
 * Finalizes the index: closes the last (possibly partial) block and returns
 * the accumulated ColumnIndex, or the shared EMPTY instance if no atom was
 * ever added.
 *
 * @return the finalized index
 */
public ColumnIndex build()
{
    // all columns were GC'd after all
    if (lastColumn == null)
        return ColumnIndex.EMPTY;
    // the last column may have fallen on an index boundary already. if not, index it explicitly.
    if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
    {
        IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
        result.columnsIndex.add(cIndexInfo);
    }
    // we should always have at least one computed index block, but we only write it out if there is more than that.
    assert result.columnsIndex.size() > 0;
    return result;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
import org.apache.cassandra.utils.AlwaysPresentFilter;
import org.apache.cassandra.utils.IFilter;
import org.apache.cassandra.utils.FilterFactory;
/**
 * Per-row column index: the list of IndexInfo blocks plus the row-level
 * Bloom filter, as serialized into an SSTable row header.
 */
public class ColumnIndex
{
    public final List<IndexHelper.IndexInfo> columnsIndex;
    public final IFilter bloomFilter;

    // Shared instance for rows with no live atoms; the always-present filter
    // keeps reads correct without allocating a real Bloom filter.
    private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList(), new AlwaysPresentFilter());

    private ColumnIndex(int estimatedColumnCount)
    {
        this(new ArrayList<IndexHelper.IndexInfo>(), FilterFactory.getFilter(estimatedColumnCount, 4, false));
    }

    private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex, IFilter bloomFilter)
    {
        this.columnsIndex = columnsIndex;
        this.bloomFilter = bloomFilter;
    }

    /**
     * Help to create an index for a column family based on size of columns,
     * and write said columns to disk.
     */
    public static class Builder
    {
        private final ColumnIndex result;
        private final long indexOffset;       // bytes between row start and first serialized column
        private long startPosition = -1;      // offset of the current index block's first atom
        private long endPosition = 0;         // offset just past the last atom added
        private long blockSize;               // bytes accumulated in the current block
        private OnDiskAtom firstColumn;       // first atom of the current block; null between blocks
        private OnDiskAtom lastColumn;        // last atom added overall
        private OnDiskAtom lastBlockClosing;  // atom that closed the most recent block
        private final DataOutput output;      // may be null when only indexing, not writing
        private final RangeTombstone.Tracker tombstoneTracker;
        private final OnDiskAtom.Serializer atomSerializer;
        private int atomCount;

        /**
         * @param cf                   column family whose atoms will be indexed/written
         * @param key                  row key (used to compute the row header size)
         * @param estimatedColumnCount sizing hint for the Bloom filter
         * @param output               destination for serialized atoms; may be null
         */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       int estimatedColumnCount,
                       DataOutput output)
        {
            this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
            this.result = new ColumnIndex(estimatedColumnCount);
            this.output = output;
            this.atomSerializer = cf.getOnDiskSerializer();
            this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
        }

        /**
         * Returns the number of bytes between the beginning of the row and the
         * first serialized column.
         */
        private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
        {
            TypeSizes typeSizes = TypeSizes.NATIVE;
            // TODO fix constantSize when changing the native constants.
            int keysize = key.remaining();
            return typeSizes.sizeof((short) keysize) + keysize // Row key
                 + typeSizes.sizeof(0L) // Row data size
                 + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
                 + typeSizes.sizeof(0); // Column count
        }

        /** @return the tracker of range tombstones currently open for this row. */
        public RangeTombstone.Tracker tombstoneTracker()
        {
            return tombstoneTracker;
        }

        /** @return atoms written so far, including repeated tombstone markers from the tracker. */
        public int writtenAtomCount()
        {
            return atomCount + tombstoneTracker.writtenAtom();
        }

        /**
         * Serializes the index into in-memory structure with all required components
         * such as Bloom Filter, index block size, IndexInfo list
         *
         * @param cf Column family to create index for
         *
         * @return information about index - it's Bloom Filter, block size and IndexInfo list
         */
        public ColumnIndex build(ColumnFamily cf) throws IOException
        {
            Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
            RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            Comparator<ByteBuffer> comparator = cf.getComparator();
            // Merge the sorted column stream with the sorted range-tombstone stream.
            for (IColumn c : cf)
            {
                while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
                {
                    add(tombstone);
                    tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
                }
                add(c);
            }
            // Flush tombstones sorting after the last column.
            while (tombstone != null)
            {
                add(tombstone);
                tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            }
            return build();
        }

        /** Adds every atom in order, then finalizes and returns the index. */
        public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
        {
            for (OnDiskAtom c : columns)
                add(c);
            return build();
        }

        /**
         * Appends one atom: updates the Bloom filter, opens a new index block if
         * needed, serializes the atom when an output is present, and closes the
         * block once it reaches the configured column index size.
         */
        public void add(OnDiskAtom column) throws IOException
        {
            atomCount++;
            // Only real columns go into the Bloom filter; tombstone markers are not looked up by name.
            if (column instanceof IColumn)
                result.bloomFilter.add(column.name());
            if (firstColumn == null)
            {
                // Starting a new index block.
                firstColumn = column;
                startPosition = endPosition;
                // TODO: have that use the firstColumn as min + make sure we
                // optimize that on read
                endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
                blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
                               // where we wouldn't make any progress because a block is filled by said marker
            }
            long size = column.serializedSizeForSSTable();
            endPosition += size;
            blockSize += size;
            // if we hit the column index size that we have to index after, go ahead and index it.
            if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
                firstColumn = null; // the next add() starts a fresh block
                lastBlockClosing = column;
            }
            // output may be null when we are only computing the index.
            if (output != null)
                atomSerializer.serializeForSSTable(column, output);
            // TODO: Should deal with removing unneeded tombstones
            tombstoneTracker.update(column);
            lastColumn = column;
        }

        /**
         * Finalizes the index: closes the last (possibly partial) block and
         * returns the accumulated result, or EMPTY if nothing was added.
         */
        public ColumnIndex build()
        {
            // all columns were GC'd after all
            if (lastColumn == null)
                return ColumnIndex.EMPTY;
            // the last column may have fallen on an index boundary already. if not, index it explicitly.
            if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
            }
            // we should always have at least one computed index block, but we only write it out if there is more than that.
            assert result.columnsIndex.size() > 0;
            return result;
        }
    }
}
Left
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
import org.apache.cassandra.utils.AlwaysPresentFilter;
import org.apache.cassandra.utils.IFilter;
import org.apache.cassandra.utils.FilterFactory;
/**
 * Per-row column index: the list of IndexInfo blocks plus the row-level
 * Bloom filter, as serialized into an SSTable row header.
 */
public class ColumnIndex
{
    public final List<IndexHelper.IndexInfo> columnsIndex;
    public final IFilter bloomFilter;

    // Shared instance for rows with no live atoms; the always-present filter
    // keeps reads correct without allocating a real Bloom filter.
    private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList(), new AlwaysPresentFilter());

    private ColumnIndex(int estimatedColumnCount)
    {
        this(new ArrayList<IndexHelper.IndexInfo>(), FilterFactory.getFilter(estimatedColumnCount, 4, false));
    }

    private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex, IFilter bloomFilter)
    {
        this.columnsIndex = columnsIndex;
        this.bloomFilter = bloomFilter;
    }

    /**
     * Help to create an index for a column family based on size of columns,
     * and write said columns to disk.
     */
    public static class Builder
    {
        private final ColumnIndex result;
        private final long indexOffset;       // bytes between row start and first serialized column
        private long startPosition = -1;      // offset of the current index block's first atom
        private long endPosition = 0;         // offset just past the last atom added
        private long blockSize;               // bytes accumulated in the current block
        private OnDiskAtom firstColumn;       // first atom of the current block; null between blocks
        private OnDiskAtom lastColumn;        // last atom added overall
        private OnDiskAtom lastBlockClosing;  // atom that closed the most recent block
        private final DataOutput output;      // may be null when only indexing, not writing
        private final RangeTombstone.Tracker tombstoneTracker; // null when building from a stream
        private final OnDiskAtom.Serializer atomSerializer;
        private int atomCount;

        /**
         * @param cf                   column family whose atoms will be indexed/written
         * @param key                  row key (used to compute the row header size)
         * @param estimatedColumnCount sizing hint for the Bloom filter
         * @param output               destination for serialized atoms; may be null
         * @param fromStream           when true, no tombstone tracker is used (atoms
         *                             arrive already serialized from a stream)
         */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       int estimatedColumnCount,
                       DataOutput output,
                       boolean fromStream)
        {
            this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
            this.result = new ColumnIndex(estimatedColumnCount);
            this.output = output;
            this.atomSerializer = cf.getOnDiskSerializer();
            this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
        }

        /** Convenience constructor for the common (non-stream) case. */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       int estimatedColumnCount,
                       DataOutput output)
        {
            this(cf, key, estimatedColumnCount, output, false);
        }

        /**
         * Returns the number of bytes between the beginning of the row and the
         * first serialized column.
         */
        private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
        {
            TypeSizes typeSizes = TypeSizes.NATIVE;
            // TODO fix constantSize when changing the native constants.
            int keysize = key.remaining();
            return typeSizes.sizeof((short) keysize) + keysize // Row key
                 + typeSizes.sizeof(0L) // Row data size
                 + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
                 + typeSizes.sizeof(0); // Column count
        }

        /** @return the tombstone tracker, or null when building from a stream. */
        public RangeTombstone.Tracker tombstoneTracker()
        {
            return tombstoneTracker;
        }

        /** @return atoms written so far, including repeated tombstone markers when a tracker exists. */
        public int writtenAtomCount()
        {
            return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
        }

        /**
         * Serializes the index into in-memory structure with all required components
         * such as Bloom Filter, index block size, IndexInfo list
         *
         * @param cf Column family to create index for
         *
         * @return information about index - it's Bloom Filter, block size and IndexInfo list
         */
        public ColumnIndex build(ColumnFamily cf) throws IOException
        {
            Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
            RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            Comparator<ByteBuffer> comparator = cf.getComparator();
            // Merge the sorted column stream with the sorted range-tombstone stream.
            for (IColumn c : cf)
            {
                while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
                {
                    add(tombstone);
                    tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
                }
                add(c);
            }
            // Flush tombstones sorting after the last column.
            while (tombstone != null)
            {
                add(tombstone);
                tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            }
            return build();
        }

        /** Adds every atom in order, then finalizes and returns the index. */
        public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
        {
            for (OnDiskAtom c : columns)
                add(c);
            return build();
        }

        /**
         * Appends one atom: updates the Bloom filter, opens a new index block if
         * needed, serializes the atom when an output is present, and closes the
         * block once it reaches the configured column index size. Tombstone
         * tracking is skipped entirely when building from a stream.
         */
        public void add(OnDiskAtom column) throws IOException
        {
            atomCount++;
            // Only real columns go into the Bloom filter; tombstone markers are not looked up by name.
            if (column instanceof IColumn)
                result.bloomFilter.add(column.name());
            if (firstColumn == null)
            {
                // Starting a new index block.
                firstColumn = column;
                startPosition = endPosition;
                // TODO: have that use the firstColumn as min + make sure we optimize that on read
                if (tombstoneTracker != null)
                    endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
                blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
                               // where we wouldn't make any progress because a block is filled by said marker
            }
            long size = column.serializedSizeForSSTable();
            endPosition += size;
            blockSize += size;
            // if we hit the column index size that we have to index after, go ahead and index it.
            if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
                firstColumn = null; // the next add() starts a fresh block
                lastBlockClosing = column;
            }
            // output may be null when we are only computing the index.
            if (output != null)
                atomSerializer.serializeForSSTable(column, output);
            // TODO: Should deal with removing unneeded tombstones
            if (tombstoneTracker != null)
                tombstoneTracker.update(column);
            lastColumn = column;
        }

        /**
         * Finalizes the index: closes the last (possibly partial) block and
         * returns the accumulated result, or EMPTY if nothing was added.
         */
        public ColumnIndex build()
        {
            // all columns were GC'd after all
            if (lastColumn == null)
                return ColumnIndex.EMPTY;
            // the last column may have fallen on an index boundary already. if not, index it explicitly.
            if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
            }
            // we should always have at least one computed index block, but we only write it out if there is more than that.
            assert result.columnsIndex.size() > 0;
            return result;
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
import org.apache.cassandra.utils.AlwaysPresentFilter;
import org.apache.cassandra.utils.IFilter;
import org.apache.cassandra.utils.FilterFactory;
/**
 * Per-row column index: the list of IndexInfo blocks plus the row-level
 * Bloom filter, as serialized into an SSTable row header.
 */
public class ColumnIndex
{
    public final List<IndexHelper.IndexInfo> columnsIndex;
    public final IFilter bloomFilter;

    // Shared instance for rows with no live atoms; the always-present filter
    // keeps reads correct without allocating a real Bloom filter.
    private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList(), new AlwaysPresentFilter());

    private ColumnIndex(int estimatedColumnCount)
    {
        this(new ArrayList<IndexHelper.IndexInfo>(), FilterFactory.getFilter(estimatedColumnCount, 4, false));
    }

    private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex, IFilter bloomFilter)
    {
        this.columnsIndex = columnsIndex;
        this.bloomFilter = bloomFilter;
    }

    /**
     * Help to create an index for a column family based on size of columns,
     * and write said columns to disk.
     */
    public static class Builder
    {
        private final ColumnIndex result;
        private final long indexOffset;       // bytes between row start and first serialized column
        private long startPosition = -1;      // offset of the current index block's first atom
        private long endPosition = 0;         // offset just past the last atom added
        private long blockSize;               // bytes accumulated in the current block
        private OnDiskAtom firstColumn;       // first atom of the current block; null between blocks
        private OnDiskAtom lastColumn;        // last atom added overall
        private OnDiskAtom lastBlockClosing;  // atom that closed the most recent block
        private final DataOutput output;      // may be null when only indexing, not writing
        private final RangeTombstone.Tracker tombstoneTracker; // null when building from a stream
        private final OnDiskAtom.Serializer atomSerializer;
        private int atomCount;

        /**
         * @param cf                   column family whose atoms will be indexed/written
         * @param key                  row key (used to compute the row header size)
         * @param estimatedColumnCount sizing hint for the Bloom filter
         * @param output               destination for serialized atoms; may be null
         * @param fromStream           when true, no tombstone tracker is used (atoms
         *                             arrive already serialized from a stream)
         */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       int estimatedColumnCount,
                       DataOutput output,
                       boolean fromStream)
        {
            this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
            this.result = new ColumnIndex(estimatedColumnCount);
            this.output = output;
            this.atomSerializer = cf.getOnDiskSerializer();
            this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
        }

        /** Convenience constructor for the common (non-stream) case. */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       int estimatedColumnCount,
                       DataOutput output)
        {
            this(cf, key, estimatedColumnCount, output, false);
        }

        /**
         * Returns the number of bytes between the beginning of the row and the
         * first serialized column.
         */
        private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
        {
            TypeSizes typeSizes = TypeSizes.NATIVE;
            // TODO fix constantSize when changing the native constants.
            int keysize = key.remaining();
            return typeSizes.sizeof((short) keysize) + keysize // Row key
                 + typeSizes.sizeof(0L) // Row data size
                 + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
                 + typeSizes.sizeof(0); // Column count
        }

        /** @return the tombstone tracker, or null when building from a stream. */
        public RangeTombstone.Tracker tombstoneTracker()
        {
            return tombstoneTracker;
        }

        /** @return atoms written so far, including repeated tombstone markers when a tracker exists. */
        public int writtenAtomCount()
        {
            return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
        }

        /**
         * Serializes the index into in-memory structure with all required components
         * such as Bloom Filter, index block size, IndexInfo list
         *
         * @param cf Column family to create index for
         *
         * @return information about index - it's Bloom Filter, block size and IndexInfo list
         */
        public ColumnIndex build(ColumnFamily cf) throws IOException
        {
            Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
            RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            Comparator<ByteBuffer> comparator = cf.getComparator();
            // Merge the sorted column stream with the sorted range-tombstone stream.
            for (IColumn c : cf)
            {
                while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
                {
                    add(tombstone);
                    tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
                }
                add(c);
            }
            // Flush tombstones sorting after the last column.
            while (tombstone != null)
            {
                add(tombstone);
                tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            }
            return build();
        }

        /** Adds every atom in order, then finalizes and returns the index. */
        public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
        {
            for (OnDiskAtom c : columns)
                add(c);
            return build();
        }

        /**
         * Appends one atom: updates the Bloom filter, opens a new index block if
         * needed, serializes the atom when an output is present, and closes the
         * block once it reaches the configured column index size. Tombstone
         * tracking is skipped entirely when building from a stream.
         */
        public void add(OnDiskAtom column) throws IOException
        {
            atomCount++;
            // Only real columns go into the Bloom filter; tombstone markers are not looked up by name.
            if (column instanceof IColumn)
                result.bloomFilter.add(column.name());
            if (firstColumn == null)
            {
                // Starting a new index block.
                firstColumn = column;
                startPosition = endPosition;
                // TODO: have that use the firstColumn as min + make sure we optimize that on read
                if (tombstoneTracker != null)
                    endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
                blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
                               // where we wouldn't make any progress because a block is filled by said marker
            }
            long size = column.serializedSizeForSSTable();
            endPosition += size;
            blockSize += size;
            // if we hit the column index size that we have to index after, go ahead and index it.
            if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
                firstColumn = null; // the next add() starts a fresh block
                lastBlockClosing = column;
            }
            // output may be null when we are only computing the index.
            if (output != null)
                atomSerializer.serializeForSSTable(column, output);
            // TODO: Should deal with removing unneeded tombstones
            if (tombstoneTracker != null)
                tombstoneTracker.update(column);
            lastColumn = column;
        }

        /**
         * Finalizes the index: closes the last (possibly partial) block and
         * returns the accumulated result, or EMPTY if nothing was added.
         */
        public ColumnIndex build()
        {
            // all columns were GC'd after all
            if (lastColumn == null)
                return ColumnIndex.EMPTY;
            // the last column may have fallen on an index boundary already. if not, index it explicitly.
            if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
            }
            // we should always have at least one computed index block, but we only write it out if there is more than that.
            assert result.columnsIndex.size() > 0;
            return result;
        }
    }
}
Right
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
/**
 * Per-row column index: the list of IndexInfo blocks describing where each
 * column block lives inside the serialized row.
 */
public class ColumnIndex
{
    public final List<IndexHelper.IndexInfo> columnsIndex;

    // Shared instance for rows that end up with no live atoms.
    private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());

    private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
    {
        this.columnsIndex = columnsIndex;
    }

    /**
     * Help to create an index for a column family based on size of columns,
     * and write said columns to disk.
     */
    public static class Builder
    {
        // Single serializer shared by all builders.
        private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
        private final ColumnIndex result;
        private final long indexOffset;       // bytes between row start and first serialized column
        private long startPosition = -1;      // offset of the current index block's first atom
        private long endPosition = 0;         // offset just past the last atom added
        private long blockSize;               // bytes accumulated in the current block
        private OnDiskAtom firstColumn;       // first atom of the current block; null between blocks
        private OnDiskAtom lastColumn;        // last atom added overall
        private OnDiskAtom lastBlockClosing;  // atom that closed the most recent block
        private final DataOutput output;      // may be null when only indexing, not writing
        private final RangeTombstone.Tracker tombstoneTracker;
        private int atomCount;

        /**
         * @param cf     column family whose atoms will be indexed/written
         * @param key    row key (used to compute the row header size)
         * @param output destination for serialized atoms; may be null
         */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       DataOutput output)
        {
            this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
            this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
            this.output = output;
            this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
        }

        /**
         * Returns the number of bytes between the beginning of the row and the
         * first serialized column.
         */
        private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
        {
            TypeSizes typeSizes = TypeSizes.NATIVE;
            // TODO fix constantSize when changing the native constants.
            int keysize = key.remaining();
            return typeSizes.sizeof((short) keysize) + keysize // Row key
                 + typeSizes.sizeof(0L) // Row data size
                 + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
                 + typeSizes.sizeof(0); // Column count
        }

        /** @return the tracker of range tombstones currently open for this row. */
        public RangeTombstone.Tracker tombstoneTracker()
        {
            return tombstoneTracker;
        }

        /** @return atoms written so far, including repeated tombstone markers from the tracker. */
        public int writtenAtomCount()
        {
            return atomCount + tombstoneTracker.writtenAtom();
        }

        /**
         * Serializes the index into in-memory structure with all required components
         * such as Bloom Filter, index block size, IndexInfo list
         *
         * @param cf Column family to create index for
         *
         * @return information about index - it's Bloom Filter, block size and IndexInfo list
         */
        public ColumnIndex build(ColumnFamily cf) throws IOException
        {
            Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
            RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            Comparator<ByteBuffer> comparator = cf.getComparator();
            // Merge the sorted column stream with the sorted range-tombstone stream.
            for (Column c : cf)
            {
                while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
                {
                    add(tombstone);
                    tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
                }
                add(c);
            }
            // Flush tombstones sorting after the last column.
            while (tombstone != null)
            {
                add(tombstone);
                tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            }
            return build();
        }

        /** Adds every atom in order, then finalizes and returns the index. */
        public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
        {
            for (OnDiskAtom c : columns)
                add(c);
            return build();
        }

        /**
         * Appends one atom: opens a new index block if needed, serializes the
         * atom when an output is present, and closes the block once it reaches
         * the configured column index size.
         */
        public void add(OnDiskAtom column) throws IOException
        {
            atomCount++;
            if (firstColumn == null)
            {
                // Starting a new index block.
                firstColumn = column;
                startPosition = endPosition;
                // TODO: have that use the firstColumn as min + make sure we
                // optimize that on read
                endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
                blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
                               // where we wouldn't make any progress because a block is filled by said marker
            }
            long size = column.serializedSizeForSSTable();
            endPosition += size;
            blockSize += size;
            // if we hit the column index size that we have to index after, go ahead and index it.
            if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
                firstColumn = null; // the next add() starts a fresh block
                lastBlockClosing = column;
            }
            // output may be null when we are only computing the index.
            if (output != null)
                atomSerializer.serializeForSSTable(column, output);
            // TODO: Should deal with removing unneeded tombstones
            tombstoneTracker.update(column);
            lastColumn = column;
        }

        /**
         * Finalizes the index: closes the last (possibly partial) block and
         * returns the accumulated result, or EMPTY if nothing was added.
         */
        public ColumnIndex build()
        {
            // all columns were GC'd after all
            if (lastColumn == null)
                return ColumnIndex.EMPTY;
            // the last column may have fallen on an index boundary already. if not, index it explicitly.
            if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
            }
            // we should always have at least one computed index block, but we only write it out if there is more than that.
            assert result.columnsIndex.size() > 0;
            return result;
        }
    }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
/**
 * Per-row column index: the list of IndexInfo blocks describing where each
 * column block lives inside the serialized row.
 */
public class ColumnIndex
{
    public final List<IndexHelper.IndexInfo> columnsIndex;

    // Shared instance for rows that end up with no live atoms.
    private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());

    private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
    {
        this.columnsIndex = columnsIndex;
    }

    /**
     * Help to create an index for a column family based on size of columns,
     * and write said columns to disk.
     */
    public static class Builder
    {
        // Single serializer shared by all builders.
        private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
        private final ColumnIndex result;
        private final long indexOffset;       // bytes between row start and first serialized column
        private long startPosition = -1;      // offset of the current index block's first atom
        private long endPosition = 0;         // offset just past the last atom added
        private long blockSize;               // bytes accumulated in the current block
        private OnDiskAtom firstColumn;       // first atom of the current block; null between blocks
        private OnDiskAtom lastColumn;        // last atom added overall
        private OnDiskAtom lastBlockClosing;  // atom that closed the most recent block
        private final DataOutput output;      // may be null when only indexing, not writing
        private final RangeTombstone.Tracker tombstoneTracker;
        private int atomCount;

        /**
         * @param cf     column family whose atoms will be indexed/written
         * @param key    row key (used to compute the row header size)
         * @param output destination for serialized atoms; may be null
         */
        public Builder(ColumnFamily cf,
                       ByteBuffer key,
                       DataOutput output)
        {
            this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
            this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
            this.output = output;
            this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
        }

        /**
         * Returns the number of bytes between the beginning of the row and the
         * first serialized column.
         */
        private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
        {
            TypeSizes typeSizes = TypeSizes.NATIVE;
            // TODO fix constantSize when changing the native constants.
            int keysize = key.remaining();
            return typeSizes.sizeof((short) keysize) + keysize // Row key
                 + typeSizes.sizeof(0L) // Row data size
                 + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
                 + typeSizes.sizeof(0); // Column count
        }

        /** @return the tracker of range tombstones currently open for this row. */
        public RangeTombstone.Tracker tombstoneTracker()
        {
            return tombstoneTracker;
        }

        /** @return atoms written so far, including repeated tombstone markers from the tracker. */
        public int writtenAtomCount()
        {
            return atomCount + tombstoneTracker.writtenAtom();
        }

        /**
         * Serializes the index into in-memory structure with all required components
         * such as Bloom Filter, index block size, IndexInfo list
         *
         * @param cf Column family to create index for
         *
         * @return information about index - it's Bloom Filter, block size and IndexInfo list
         */
        public ColumnIndex build(ColumnFamily cf) throws IOException
        {
            Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
            RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            Comparator<ByteBuffer> comparator = cf.getComparator();
            // Merge the sorted column stream with the sorted range-tombstone stream.
            for (Column c : cf)
            {
                while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
                {
                    add(tombstone);
                    tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
                }
                add(c);
            }
            // Flush tombstones sorting after the last column.
            while (tombstone != null)
            {
                add(tombstone);
                tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
            }
            return build();
        }

        /** Adds every atom in order, then finalizes and returns the index. */
        public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
        {
            for (OnDiskAtom c : columns)
                add(c);
            return build();
        }

        /**
         * Appends one atom: opens a new index block if needed, serializes the
         * atom when an output is present, and closes the block once it reaches
         * the configured column index size.
         */
        public void add(OnDiskAtom column) throws IOException
        {
            atomCount++;
            if (firstColumn == null)
            {
                // Starting a new index block.
                firstColumn = column;
                startPosition = endPosition;
                // TODO: have that use the firstColumn as min + make sure we
                // optimize that on read
                endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
                blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
                               // where we wouldn't make any progress because a block is filled by said marker
            }
            long size = column.serializedSizeForSSTable();
            endPosition += size;
            blockSize += size;
            // if we hit the column index size that we have to index after, go ahead and index it.
            if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
                firstColumn = null; // the next add() starts a fresh block
                lastBlockClosing = column;
            }
            // output may be null when we are only computing the index.
            if (output != null)
                atomSerializer.serializeForSSTable(column, output);
            // TODO: Should deal with removing unneeded tombstones
            tombstoneTracker.update(column);
            lastColumn = column;
        }

        /**
         * Finalizes the index: closes the last (possibly partial) block and
         * returns the accumulated result, or EMPTY if nothing was added.
         */
        public ColumnIndex build()
        {
            // all columns were GC'd after all
            if (lastColumn == null)
                return ColumnIndex.EMPTY;
            // the last column may have fallen on an index boundary already. if not, index it explicitly.
            if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
            {
                IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
                result.columnsIndex.add(cIndexInfo);
            }
            // we should always have at least one computed index block, but we only write it out if there is more than that.
            assert result.columnsIndex.size() > 0;
            return result;
        }
    }
}
MergeMethods
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex {
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
{
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder {
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output,
boolean fromStream)
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf,
ByteBuffer key,
DataOutput output)
{
<<<<<<< MINE
this(cf, key, estimatedColumnCount, output, false);
=======
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
>>>>>>> YOURS
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException
{
atomCount++;
if (firstColumn == null)
{
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build()
{
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex {
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
{
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder {
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output,
boolean fromStream)
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf,
ByteBuffer key,
DataOutput output)
{
<<<<<<< MINE
this(cf, key, estimatedColumnCount, output, false);
=======
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
>>>>>>> YOURS
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException
{
atomCount++;
if (firstColumn == null)
{
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build()
{
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
KeepBothMethods
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex {
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex) {
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder {
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf, ByteBuffer key, int estimatedColumnCount, DataOutput output, boolean fromStream) {
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf, ByteBuffer key, int estimatedColumnCount, DataOutput output) {
this(cf, key, estimatedColumnCount, output, false);
}
public Builder(ColumnFamily cf, ByteBuffer key, DataOutput output) {
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo) {
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return // Row key
typeSizes.sizeof((short) keysize) + keysize + // Row data size
typeSizes.sizeof(0L) + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes) + // Column count
typeSizes.sizeof(0);
}
public RangeTombstone.Tracker tombstoneTracker() {
return tombstoneTracker;
}
public int writtenAtomCount() {
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException {
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf) {
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0) {
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null) {
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException {
for (OnDiskAtom c : columns) add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException {
atomCount++;
if (firstColumn == null) {
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
// We don't count repeated tombstone marker in the block size, to avoid a situation
blockSize = 0;
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize()) {
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build() {
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn) {
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex {
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex) {
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder {
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf, ByteBuffer key, int estimatedColumnCount, DataOutput output, boolean fromStream) {
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf, ByteBuffer key, int estimatedColumnCount, DataOutput output) {
this(cf, key, estimatedColumnCount, output, false);
}
public Builder(ColumnFamily cf, ByteBuffer key, DataOutput output) {
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo) {
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return // Row key
typeSizes.sizeof((short) keysize) + keysize + // Row data size
typeSizes.sizeof(0L) + DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes) + // Column count
typeSizes.sizeof(0);
}
public RangeTombstone.Tracker tombstoneTracker() {
return tombstoneTracker;
}
public int writtenAtomCount() {
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException {
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf) {
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0) {
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null) {
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException {
for (OnDiskAtom c : columns) add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException {
atomCount++;
if (firstColumn == null) {
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
// We don't count repeated tombstone marker in the block size, to avoid a situation
blockSize = 0;
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize()) {
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build() {
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn) {
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
Safe
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex {
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
{
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder {
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output,
boolean fromStream)
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf,
ByteBuffer key,
DataOutput output)
{
<<<<<<< MINE
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
=======
this(cf, key, estimatedColumnCount, output, false);
>>>>>>> YOURS
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException
{
atomCount++;
if (firstColumn == null)
{
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build()
{
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex {
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
{
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder {
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output,
boolean fromStream)
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(estimatedColumnCount);
this.output = output;
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf,
ByteBuffer key,
DataOutput output)
{
<<<<<<< MINE
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
=======
this(cf, key, estimatedColumnCount, output, false);
>>>>>>> YOURS
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException
{
atomCount++;
if (firstColumn == null)
{
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build()
{
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
Unstructured
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex
{
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
{
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder
{
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
<<<<<<< MINE
int estimatedColumnCount,
DataOutput output,
boolean fromStream)
=======
DataOutput output)
>>>>>>> YOURS
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
<<<<<<< MINE
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output)
{
this(cf, key, estimatedColumnCount, output, false);
=======
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
>>>>>>> YOURS
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException
{
atomCount++;
if (firstColumn == null)
{
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build()
{
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.DataOutput;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.io.sstable.IndexHelper;
public class ColumnIndex
{
public final List<IndexHelper.IndexInfo> columnsIndex;
private static final ColumnIndex EMPTY = new ColumnIndex(Collections.<IndexHelper.IndexInfo>emptyList());
private ColumnIndex(List<IndexHelper.IndexInfo> columnsIndex)
{
this.columnsIndex = columnsIndex;
}
/**
* Help to create an index for a column family based on size of columns,
* and write said columns to disk.
*/
public static class Builder
{
private static final OnDiskAtom.Serializer atomSerializer = Column.onDiskSerializer();
private final ColumnIndex result;
private final long indexOffset;
private long startPosition = -1;
private long endPosition = 0;
private long blockSize;
private OnDiskAtom firstColumn;
private OnDiskAtom lastColumn;
private OnDiskAtom lastBlockClosing;
private final DataOutput output;
private final RangeTombstone.Tracker tombstoneTracker;
private int atomCount;
public Builder(ColumnFamily cf,
ByteBuffer key,
<<<<<<< MINE
int estimatedColumnCount,
DataOutput output,
boolean fromStream)
=======
DataOutput output)
>>>>>>> YOURS
{
this.indexOffset = rowHeaderSize(key, cf.deletionInfo());
this.result = new ColumnIndex(new ArrayList<IndexHelper.IndexInfo>());
this.output = output;
<<<<<<< MINE
this.atomSerializer = cf.getOnDiskSerializer();
this.tombstoneTracker = fromStream ? null : new RangeTombstone.Tracker(cf.getComparator());
}
public Builder(ColumnFamily cf,
ByteBuffer key,
int estimatedColumnCount,
DataOutput output)
{
this(cf, key, estimatedColumnCount, output, false);
=======
this.tombstoneTracker = new RangeTombstone.Tracker(cf.getComparator());
>>>>>>> YOURS
}
/**
* Returns the number of bytes between the beginning of the row and the
* first serialized column.
*/
private static long rowHeaderSize(ByteBuffer key, DeletionInfo delInfo)
{
TypeSizes typeSizes = TypeSizes.NATIVE;
// TODO fix constantSize when changing the nativeconststs.
int keysize = key.remaining();
return typeSizes.sizeof((short) keysize) + keysize // Row key
+ typeSizes.sizeof(0L) // Row data size
+ DeletionTime.serializer.serializedSize(delInfo.getTopLevelDeletion(), typeSizes)
+ typeSizes.sizeof(0); // Column count
}
public RangeTombstone.Tracker tombstoneTracker()
{
return tombstoneTracker;
}
public int writtenAtomCount()
{
return tombstoneTracker == null ? atomCount : atomCount + tombstoneTracker.writtenAtom();
}
/**
* Serializes the index into in-memory structure with all required components
* such as Bloom Filter, index block size, IndexInfo list
*
* @param cf Column family to create index for
*
* @return information about index - it's Bloom Filter, block size and IndexInfo list
*/
public ColumnIndex build(ColumnFamily cf) throws IOException
{
Iterator<RangeTombstone> rangeIter = cf.deletionInfo().rangeIterator();
RangeTombstone tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
Comparator<ByteBuffer> comparator = cf.getComparator();
for (Column c : cf)
{
while (tombstone != null && comparator.compare(c.name(), tombstone.min) >= 0)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
add(c);
}
while (tombstone != null)
{
add(tombstone);
tombstone = rangeIter.hasNext() ? rangeIter.next() : null;
}
return build();
}
public ColumnIndex build(Iterable<OnDiskAtom> columns) throws IOException
{
for (OnDiskAtom c : columns)
add(c);
return build();
}
public void add(OnDiskAtom column) throws IOException
{
atomCount++;
if (firstColumn == null)
{
firstColumn = column;
startPosition = endPosition;
// TODO: have that use the firstColumn as min + make sure we optimize that on read
if (tombstoneTracker != null)
endPosition += tombstoneTracker.writeOpenedMarker(firstColumn, output, atomSerializer);
blockSize = 0; // We don't count repeated tombstone marker in the block size, to avoid a situation
// where we wouldn't make any progress because a block is filled by said marker
}
long size = column.serializedSizeForSSTable();
endPosition += size;
blockSize += size;
// if we hit the column index size that we have to index after, go ahead and index it.
if (blockSize >= DatabaseDescriptor.getColumnIndexSize())
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), column.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
firstColumn = null;
lastBlockClosing = column;
}
if (output != null)
atomSerializer.serializeForSSTable(column, output);
// TODO: Should deal with removing unneeded tombstones
if (tombstoneTracker != null)
tombstoneTracker.update(column);
lastColumn = column;
}
public ColumnIndex build()
{
// all columns were GC'd after all
if (lastColumn == null)
return ColumnIndex.EMPTY;
// the last column may have fallen on an index boundary already. if not, index it explicitly.
if (result.columnsIndex.isEmpty() || lastBlockClosing != lastColumn)
{
IndexHelper.IndexInfo cIndexInfo = new IndexHelper.IndexInfo(firstColumn.name(), lastColumn.name(), indexOffset + startPosition, endPosition - startPosition);
result.columnsIndex.add(cIndexInfo);
}
// we should always have at least one computed index block, but we only write it out if there is more than that.
assert result.columnsIndex.size() > 0;
return result;
}
}
}
Diff Result
No diff
Case 3 - cassandra.rev_2ce7b_e863c.AlterTableStatement.java
AlterTableStatement
Left modified the body and replaced one parameter
Right modified the body and replaced two parameters
Unstructured reported a conflict on the signatures
Safe kept both versions of the method
MergeMethods reported a conflict on the whole methods
KeepBothMethods kept both versions of the method
Base
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.util.*;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.thrift.InvalidRequestException;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
public class AlterTableStatement extends SchemaAlteringStatement
{
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType;
public final String validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps = new CFPropDefs();
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, Map<String, String> propertyMap)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps.addAll(propertyMap);
}
public void checkAccess(ClientState state) throws InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
public void announceMigration() throws InvalidRequestException, ConfigurationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
CFPropDefs.parseType(validator),
null,
null,
null,
cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = CFPropDefs.parseType(validator);
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
cfm.keyValidator(newType);
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, CFPropDefs.parseType(validator));
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(CFPropDefs.parseType(validator));
break;
case COLUMN_METADATA:
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(CFPropDefs.parseType(validator));
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.util.*;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.CompositeType;
import org.apache.cassandra.db.marshal.CounterColumnType;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.thrift.InvalidRequestException;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
public class AlterTableStatement extends SchemaAlteringStatement
{
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType;
public final String validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps = new CFPropDefs();
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, Map<String, String> propertyMap)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps.addAll(propertyMap);
}
public void checkAccess(ClientState state) throws InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
public void announceMigration() throws InvalidRequestException, ConfigurationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
CFPropDefs.parseType(validator),
null,
null,
null,
cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = CFPropDefs.parseType(validator);
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
cfm.keyValidator(newType);
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, CFPropDefs.parseType(validator));
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(CFPropDefs.parseType(validator));
break;
case COLUMN_METADATA:
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(CFPropDefs.parseType(validator));
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
Left
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.util.*;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.thrift.InvalidRequestException;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement: adds a column (ADD), changes a column's
 * validator (ALTER), removes a column (DROP), or updates column family
 * options (OPTS). The change is applied to a clone of the current metadata
 * and then announced via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement
{
// Which variant of ALTER TABLE was parsed.
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType; // the alteration to perform
public final String validator; // name of the new validator type (ADD/ALTER only)
public final ColumnIdentifier columnName; // target column; ignored for OPTS
private final CFPropDefs cfProps; // WITH options (OPTS only)
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
/** Requires ALTER permission on the target column family. */
public void checkAccess(ClientState state) throws InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
/**
 * Validates the request against the current schema, builds an updated copy
 * of the column family metadata, and announces the update.
 *
 * @throws InvalidRequestException if the change is illegal for this CF
 *         (ADD/DROP on a compact CF, unknown column, PRIMARY KEY conflicts)
 * @throws ConfigurationException if the resulting metadata is rejected
 */
public void announceMigration() throws InvalidRequestException, ConfigurationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
// Mutate a clone; live metadata is only replaced once the update is announced.
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS does not refer to a column, so skip the lookup in that case.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
// Reject names colliding with PRIMARY KEY parts or existing columns.
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For composite comparators the new column's component index is the last
// comparator component — TODO confirm: assumes the final component carries column names.
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
CFPropDefs.parseType(validator),
null,
null,
null,
cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = CFPropDefs.parseType(validator);
// Counters are rejected as a PRIMARY KEY part.
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
cfm.keyValidator(newType);
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
// Swap the component type at this column's position in the composite comparator.
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, CFPropDefs.parseType(validator));
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
// Compact value column: change the default validator.
cfm.defaultValidator(CFPropDefs.parseType(validator));
break;
case COLUMN_METADATA:
// Re-register the existing definition with its new validator.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(CFPropDefs.parseType(validator));
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
// Locate the definition by name before removing it.
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
// Validate, then apply the WITH options onto the cloned metadata.
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Human-readable summary for logging/debugging. */
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.util.*;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.thrift.InvalidRequestException;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement: adds a column (ADD), changes a column's
 * validator (ALTER), removes a column (DROP), or updates column family
 * options (OPTS). The change is applied to a clone of the current metadata
 * and then announced via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement
{
// Which variant of ALTER TABLE was parsed.
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType; // the alteration to perform
public final String validator; // name of the new validator type (ADD/ALTER only)
public final ColumnIdentifier columnName; // target column; ignored for OPTS
private final CFPropDefs cfProps; // WITH options (OPTS only)
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
/** Requires ALTER permission on the target column family. */
public void checkAccess(ClientState state) throws InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
/**
 * Validates the request against the current schema, builds an updated copy
 * of the column family metadata, and announces the update.
 *
 * @throws InvalidRequestException if the change is illegal for this CF
 *         (ADD/DROP on a compact CF, unknown column, PRIMARY KEY conflicts)
 * @throws ConfigurationException if the resulting metadata is rejected
 */
public void announceMigration() throws InvalidRequestException, ConfigurationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
// Mutate a clone; live metadata is only replaced once the update is announced.
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS does not refer to a column, so skip the lookup in that case.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
// Reject names colliding with PRIMARY KEY parts or existing columns.
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For composite comparators the new column's component index is the last
// comparator component — TODO confirm: assumes the final component carries column names.
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
CFPropDefs.parseType(validator),
null,
null,
null,
cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = CFPropDefs.parseType(validator);
// Counters are rejected as a PRIMARY KEY part.
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
cfm.keyValidator(newType);
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
// Swap the component type at this column's position in the composite comparator.
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, CFPropDefs.parseType(validator));
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
// Compact value column: change the default validator.
cfm.defaultValidator(CFPropDefs.parseType(validator));
break;
case COLUMN_METADATA:
// Re-register the existing definition with its new validator.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(CFPropDefs.parseType(validator));
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
// Locate the definition by name before removing it.
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
// Validate, then apply the WITH options onto the cloned metadata.
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Human-readable summary for logging/debugging. */
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
Right
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement: adds a column (ADD, including collection
 * columns), changes a column's validator (ALTER), removes a column (DROP), or
 * updates column family options (OPTS). The change is applied to a clone of
 * the current metadata and then announced via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement
{
// Which variant of ALTER TABLE was parsed.
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType; // the alteration to perform
public final ParsedType validator; // new validator type (ADD/ALTER only)
public final ColumnIdentifier columnName; // target column; ignored for OPTS
private final CFPropDefs cfProps; // WITH options (OPTS only)
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
/** Requires ALTER permission on the target column family. */
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
/**
 * Validates the request against the current schema, builds an updated copy
 * of the column family metadata, and announces the update.
 *
 * @throws RequestValidationException if the change is illegal for this CF
 *         (ADD/DROP on a compact CF, unknown column, PRIMARY KEY conflicts,
 *         collection column on a non-composite CF)
 */
public void announceMigration() throws RequestValidationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
// Mutate a clone; live metadata is only replaced once the update is announced.
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS does not refer to a column, so skip the lookup in that case.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
// Reject names colliding with PRIMARY KEY parts or existing columns.
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For composite comparators the new column's component index is the last
// comparator component — TODO confirm: assumes the final component carries column names.
Integer componentIndex = cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType)
{
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
// Collections occupy an extra trailing comparator component, so the
// name component sits one position earlier.
componentIndex--;
// Merge the new collection into the existing collection map (if any).
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections
? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined)
: new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType)type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
// Replace the trailing ColumnToCollectionType component, or append one
// if this is the first collection column.
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType)cfm.comparator).types);
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
type,
null,
null,
null,
componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
// Counters are rejected as a PRIMARY KEY part.
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Composite keys: replace only this component of the key validator.
if (cfDef.hasCompositeKey)
{
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
}
else
{
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
// Swap the component type at this column's position in the composite comparator.
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
// Compact value column: change the default validator.
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Re-register the existing definition with its new validator.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
// Locate the definition by name before removing it.
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
// Validate, then apply the WITH options onto the cloned metadata.
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Human-readable summary for logging/debugging. */
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement: adds a column (ADD, including collection
 * columns), changes a column's validator (ALTER), removes a column (DROP), or
 * updates column family options (OPTS). The change is applied to a clone of
 * the current metadata and then announced via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement
{
// Which variant of ALTER TABLE was parsed.
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType; // the alteration to perform
public final ParsedType validator; // new validator type (ADD/ALTER only)
public final ColumnIdentifier columnName; // target column; ignored for OPTS
private final CFPropDefs cfProps; // WITH options (OPTS only)
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
/** Requires ALTER permission on the target column family. */
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
/**
 * Validates the request against the current schema, builds an updated copy
 * of the column family metadata, and announces the update.
 *
 * @throws RequestValidationException if the change is illegal for this CF
 *         (ADD/DROP on a compact CF, unknown column, PRIMARY KEY conflicts,
 *         collection column on a non-composite CF)
 */
public void announceMigration() throws RequestValidationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
// Mutate a clone; live metadata is only replaced once the update is announced.
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS does not refer to a column, so skip the lookup in that case.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
// Reject names colliding with PRIMARY KEY parts or existing columns.
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For composite comparators the new column's component index is the last
// comparator component — TODO confirm: assumes the final component carries column names.
Integer componentIndex = cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType)
{
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
// Collections occupy an extra trailing comparator component, so the
// name component sits one position earlier.
componentIndex--;
// Merge the new collection into the existing collection map (if any).
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections
? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined)
: new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType)type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
// Replace the trailing ColumnToCollectionType component, or append one
// if this is the first collection column.
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType)cfm.comparator).types);
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
type,
null,
null,
null,
componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
// Counters are rejected as a PRIMARY KEY part.
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Composite keys: replace only this component of the key validator.
if (cfDef.hasCompositeKey)
{
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
}
else
{
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
// Swap the component type at this column's position in the composite comparator.
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
// Compact value column: change the default validator.
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Re-register the existing definition with its new validator.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
// Locate the definition by name before removing it.
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
// Validate, then apply the WITH options onto the cloned metadata.
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Human-readable summary for logging/debugging. */
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
MergeMethods
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement: adds a column (ADD, including collection
 * columns), changes a column's validator (ALTER), removes a column (DROP), or
 * updates column family options (OPTS).
 *
 * NOTE(review): this block contained unresolved merge-conflict markers with
 * two constructor variants. The {@code ParsedType} constructor is kept: the
 * {@code validator} field is declared {@code ParsedType} and the body calls
 * {@code validator.getType()}, so the {@code String} variant cannot compile.
 */
public class AlterTableStatement extends SchemaAlteringStatement {
// Which variant of ALTER TABLE was parsed.
public static enum Type {
ADD , ALTER , DROP , OPTS}
public final Type oType; // the alteration to perform
public final ParsedType validator; // new validator type (ADD/ALTER only)
public final ColumnIdentifier columnName; // target column; ignored for OPTS
private final CFPropDefs cfProps; // WITH options (OPTS only)
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
/** Requires ALTER permission on the target column family. */
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
/**
 * Validates the request against the current schema, builds an updated copy
 * of the column family metadata, and announces the update.
 *
 * @throws RequestValidationException if the change is illegal for this CF
 *         (ADD/DROP on a compact CF, unknown column, PRIMARY KEY conflicts,
 *         collection column on a non-composite CF)
 */
public void announceMigration() throws RequestValidationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
// Mutate a clone; live metadata is only replaced once the update is announced.
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS does not refer to a column, so skip the lookup in that case.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
// Reject names colliding with PRIMARY KEY parts or existing columns.
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For composite comparators the new column's component index is the last
// comparator component — TODO confirm: assumes the final component carries column names.
Integer componentIndex = cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType)
{
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
// Collections occupy an extra trailing comparator component.
componentIndex--;
// Merge the new collection into the existing collection map (if any).
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections
? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined)
: new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType)type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
// Replace the trailing ColumnToCollectionType component, or append one.
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType)cfm.comparator).types);
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
type,
null,
null,
null,
componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
// Counters are rejected as a PRIMARY KEY part.
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Composite keys: replace only this component of the key validator.
if (cfDef.hasCompositeKey)
{
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
}
else
{
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
// Swap the component type at this column's position in the composite comparator.
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
// Compact value column: change the default validator.
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Re-register the existing definition with its new validator.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
// Locate the definition by name before removing it.
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
// Validate, then apply the WITH options onto the cloned metadata.
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Human-readable summary for logging/debugging. */
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
public class AlterTableStatement extends SchemaAlteringStatement {
public static enum Type {
ADD , ALTER , DROP , OPTS}
public final Type oType;
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
<<<<<<< MINE
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
=======
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps)
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
>>>>>>> YOURS
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
public void announceMigration() throws RequestValidationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
Integer componentIndex = cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType)
{
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections
? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined)
: new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType)type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType)cfm.comparator).types);
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
type,
null,
null,
null,
componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
if (cfDef.hasCompositeKey)
{
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
}
else
{
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Debug representation of this ALTER statement. */
@Override
public String toString() {
    String repr = "AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)";
    return String.format(repr, cfName, oType, columnName, validator);
}
}
KeepBothMethods
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement.
 *
 * Depending on {@link Type} it adds a column, alters a column's validator,
 * drops a column, or applies WITH options, then announces the resulting
 * schema change via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement {
// Kind of alteration requested by the parser.
public static enum Type {
ADD(), ALTER(), DROP(), OPTS()
}
public final Type oType;
// Target type for ADD/ALTER; unused by DROP/OPTS.
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
// NOTE(review): this overload declares a String validator but assigns it to
// the ParsedType field below, so it cannot compile as written. It appears to
// be a stale variant kept by the merge tooling — confirm and remove.
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
// The caller must hold ALTER permission on the target column family.
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException {
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
// Validates the request against the live metadata, applies the change to a
// clone of the CFMetaData, and announces the schema update.
public void announceMigration() throws RequestValidationException {
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS names no column; otherwise resolve the column being altered.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch(oType) {
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null) {
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For a composite comparator the new column's component index is the last
// comparator component; non-composite CFs use null.
Integer componentIndex = cfDef.isComposite ? ((CompositeType) meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType) {
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
// Merge the new collection into the existing collection map, if any.
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections ? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined) : new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType) type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
// Replace the trailing collection component or append a new one.
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key, type, null, null, null, componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Swap only the affected component of the key validator.
if (cfDef.hasCompositeKey) {
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
} else {
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Update the validator on the existing column definition.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values()) {
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
// Debug representation of this statement.
public String toString() {
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)", cfName, oType, columnName, validator);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement.
 *
 * Depending on {@link Type} it adds a column, alters a column's validator,
 * drops a column, or applies WITH options, then announces the resulting
 * schema change via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement {
// Kind of alteration requested by the parser.
public static enum Type {
ADD(), ALTER(), DROP(), OPTS()
}
public final Type oType;
// Target type for ADD/ALTER; unused by DROP/OPTS.
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
// NOTE(review): this overload declares a String validator but assigns it to
// the ParsedType field below, so it cannot compile as written. It appears to
// be a stale variant kept by the merge tooling — confirm and remove.
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
// The caller must hold ALTER permission on the target column family.
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException {
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
// Validates the request against the live metadata, applies the change to a
// clone of the CFMetaData, and announces the schema update.
public void announceMigration() throws RequestValidationException {
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS names no column; otherwise resolve the column being altered.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch(oType) {
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null) {
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For a composite comparator the new column's component index is the last
// comparator component; non-composite CFs use null.
Integer componentIndex = cfDef.isComposite ? ((CompositeType) meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType) {
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
// Merge the new collection into the existing collection map, if any.
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections ? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined) : new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType) type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
// Replace the trailing collection component or append a new one.
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key, type, null, null, null, componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Swap only the affected component of the key validator.
if (cfDef.hasCompositeKey) {
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
} else {
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Update the validator on the existing column definition.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values()) {
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
// Debug representation of this statement.
public String toString() {
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)", cfName, oType, columnName, validator);
}
}
Safe
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement.
 *
 * Depending on {@link Type} it adds a column, alters a column's validator,
 * drops a column, or applies WITH options, then announces the resulting
 * schema change via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement {
// Kind of alteration requested by the parser.
public static enum Type {
ADD(), ALTER(), DROP(), OPTS()
}
public final Type oType;
// Target type for ADD/ALTER; unused by DROP/OPTS.
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
// NOTE(review): this overload declares a String validator but assigns it to
// the ParsedType field below, so it cannot compile as written. It appears to
// be a stale variant kept by the merge tooling — confirm and remove.
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
// The caller must hold ALTER permission on the target column family.
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException {
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
// Validates the request against the live metadata, applies the change to a
// clone of the CFMetaData, and announces the schema update.
public void announceMigration() throws RequestValidationException {
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS names no column; otherwise resolve the column being altered.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch(oType) {
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null) {
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For a composite comparator the new column's component index is the last
// comparator component; non-composite CFs use null.
Integer componentIndex = cfDef.isComposite ? ((CompositeType) meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType) {
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
// Merge the new collection into the existing collection map, if any.
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections ? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined) : new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType) type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
// Replace the trailing collection component or append a new one.
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key, type, null, null, null, componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Swap only the affected component of the key validator.
if (cfDef.hasCompositeKey) {
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
} else {
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Update the validator on the existing column definition.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values()) {
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
// Debug representation of this statement.
public String toString() {
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)", cfName, oType, columnName, validator);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
/**
 * CQL3 {@code ALTER TABLE} statement.
 *
 * Depending on {@link Type} it adds a column, alters a column's validator,
 * drops a column, or applies WITH options, then announces the resulting
 * schema change via {@link MigrationManager}.
 */
public class AlterTableStatement extends SchemaAlteringStatement {
// Kind of alteration requested by the parser.
public static enum Type {
ADD(), ALTER(), DROP(), OPTS()
}
public final Type oType;
// Target type for ADD/ALTER; unused by DROP/OPTS.
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
// NOTE(review): this overload declares a String validator but assigns it to
// the ParsedType field below, so it cannot compile as written. It appears to
// be a stale variant kept by the merge tooling — confirm and remove.
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps) {
super(name);
this.oType = type;
this.columnName = columnName;
// used only for ADD/ALTER commands
this.validator = validator;
this.cfProps = cfProps;
}
// The caller must hold ALTER permission on the target column family.
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException {
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
// Validates the request against the live metadata, applies the change to a
// clone of the CFMetaData, and announces the schema update.
public void announceMigration() throws RequestValidationException {
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
// OPTS names no column; otherwise resolve the column being altered.
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch(oType) {
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null) {
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
// For a composite comparator the new column's component index is the last
// comparator component; non-composite CFs use null.
Integer componentIndex = cfDef.isComposite ? ((CompositeType) meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType) {
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
// Merge the new collection into the existing collection map, if any.
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections ? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined) : new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType) type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
// Replace the trailing collection component or append a new one.
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key, type, null, null, null, componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
// Swap only the affected component of the key validator.
if (cfDef.hasCompositeKey) {
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
} else {
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
// Update the validator on the existing column definition.
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch(name.kind) {
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values()) {
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
// Debug representation of this statement.
public String toString() {
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)", cfName, oType, columnName, validator);
}
}
Unstructured
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.config.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;
import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
public class AlterTableStatement extends SchemaAlteringStatement
{
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType;
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
<<<<<<< MINE
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps)
=======
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps)
>>>>>>> YOURS
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
public void announceMigration() throws RequestValidationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
Integer componentIndex = cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType)
{
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections
? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined)
: new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType)type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType)cfm.comparator).types);
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
type,
null,
null,
null,
componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
if (cfDef.hasCompositeKey)
{
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
}
else
{
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
/** Debug-friendly description of this ALTER statement (name, kind, column, validator). */
@Override
public String toString()
{
    // Gather the pieces first; String.format accepts them as a varargs array.
    Object[] parts = { cfName, oType, columnName, validator };
    return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)", parts);
}
}/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3.statements;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.cassandra.auth.Permission;
import org.apache.cassandra.config.*;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.*;
import org.apache.cassandra.db.marshal.*;
import org.apache.cassandra.exceptions.*;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.MigrationManager;

import static org.apache.cassandra.thrift.ThriftValidation.validateColumnFamily;
public class AlterTableStatement extends SchemaAlteringStatement
{
public static enum Type
{
ADD, ALTER, DROP, OPTS
}
public final Type oType;
public final ParsedType validator;
public final ColumnIdentifier columnName;
private final CFPropDefs cfProps;
<<<<<<< MINE
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, String validator, CFPropDefs cfProps)
=======
public AlterTableStatement(CFName name, Type type, ColumnIdentifier columnName, ParsedType validator, CFPropDefs cfProps)
>>>>>>> YOURS
{
super(name);
this.oType = type;
this.columnName = columnName;
this.validator = validator; // used only for ADD/ALTER commands
this.cfProps = cfProps;
}
public void checkAccess(ClientState state) throws UnauthorizedException, InvalidRequestException
{
state.hasColumnFamilyAccess(keyspace(), columnFamily(), Permission.ALTER);
}
public void announceMigration() throws RequestValidationException
{
CFMetaData meta = validateColumnFamily(keyspace(), columnFamily());
CFMetaData cfm = meta.clone();
CFDefinition cfDef = meta.getCfDef();
CFDefinition.Name name = this.oType == Type.OPTS ? null : cfDef.get(columnName);
switch (oType)
{
case ADD:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot add new column to a compact CF");
if (name != null)
{
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with a PRIMARY KEY part", columnName));
case COLUMN_METADATA:
throw new InvalidRequestException(String.format("Invalid column name %s because it conflicts with an existing column", columnName));
}
}
Integer componentIndex = cfDef.isComposite ? ((CompositeType)meta.comparator).types.size() - 1 : null;
AbstractType<?> type = validator.getType();
if (type instanceof CollectionType)
{
if (!cfDef.isComposite)
throw new InvalidRequestException("Cannot use collection types with non-composite PRIMARY KEY");
componentIndex--;
Map<ByteBuffer, CollectionType> collections = cfDef.hasCollections
? new HashMap<ByteBuffer, CollectionType>(cfDef.getCollectionType().defined)
: new HashMap<ByteBuffer, CollectionType>();
collections.put(columnName.key, (CollectionType)type);
ColumnToCollectionType newColType = ColumnToCollectionType.getInstance(collections);
List<AbstractType<?>> ctypes = new ArrayList<AbstractType<?>>(((CompositeType)cfm.comparator).types);
if (cfDef.hasCollections)
ctypes.set(ctypes.size() - 1, newColType);
else
ctypes.add(newColType);
cfm.comparator = CompositeType.getInstance(ctypes);
}
cfm.addColumnDefinition(new ColumnDefinition(columnName.key,
type,
null,
null,
null,
componentIndex));
break;
case ALTER:
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
AbstractType<?> newType = validator.getType();
if (newType instanceof CounterColumnType)
throw new InvalidRequestException(String.format("counter type is not supported for PRIMARY KEY part %s", columnName));
if (cfDef.hasCompositeKey)
{
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.getKeyValidator()).types);
newTypes.set(name.position, newType);
cfm.keyValidator(CompositeType.getInstance(newTypes));
}
else
{
cfm.keyValidator(newType);
}
break;
case COLUMN_ALIAS:
assert cfDef.isComposite;
List<AbstractType<?>> newTypes = new ArrayList<AbstractType<?>>(((CompositeType) cfm.comparator).types);
newTypes.set(name.position, validator.getType());
cfm.comparator = CompositeType.getInstance(newTypes);
break;
case VALUE_ALIAS:
cfm.defaultValidator(validator.getType());
break;
case COLUMN_METADATA:
ColumnDefinition column = cfm.getColumnDefinition(columnName.key);
column.setValidator(validator.getType());
cfm.addColumnDefinition(column);
break;
}
break;
case DROP:
if (cfDef.isCompact)
throw new InvalidRequestException("Cannot drop columns from a compact CF");
if (name == null)
throw new InvalidRequestException(String.format("Column %s was not found in CF %s", columnName, columnFamily()));
switch (name.kind)
{
case KEY_ALIAS:
case COLUMN_ALIAS:
throw new InvalidRequestException(String.format("Cannot drop PRIMARY KEY part %s", columnName));
case COLUMN_METADATA:
ColumnDefinition toDelete = null;
for (ColumnDefinition columnDef : cfm.getColumn_metadata().values())
{
if (columnDef.name.equals(columnName.key))
toDelete = columnDef;
}
assert toDelete != null;
cfm.removeColumnDefinition(toDelete);
break;
}
break;
case OPTS:
if (cfProps == null)
throw new InvalidRequestException(String.format("ALTER COLUMNFAMILY WITH invoked, but no parameters found"));
cfProps.validate();
cfProps.applyToCFMetadata(cfm);
break;
}
MigrationManager.announceColumnFamilyUpdate(cfm);
}
public String toString()
{
return String.format("AlterTableStatement(name=%s, type=%s, column=%s, validator=%s)",
cfName,
oType,
columnName,
validator);
}
}
Diff Result
No diff
Case 4 - gradle.rev_16c71_dcc4b.ExternalResourceResolver.java
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
    /**
     * Returns the version conflict strategy used by this configuration.
     *
     * @return strategy
     */
    VersionConflictStrategy getVersionConflictStrategy();
    /**
     * Returns the resolution strategy used by this configuration.
     *
     * @return strategy
     */
    ResolutionStrategy getResolution();
    /**
     * The states a configuration can be in. A configuration is only mutable as long as it is
     * in the unresolved state.
     */
    enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
    /**
     * Returns the state of the configuration.
     *
     * @see org.gradle.api.artifacts.Configuration.State
     */
    State getState();
    /**
     * Returns the name of this configuration.
     *
     * @return The configuration name, never null.
     */
    String getName();
    /**
     * A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
     */
    static class Namer implements org.gradle.api.Namer<Configuration> {
        public String determineName(Configuration c) {
            return c.getName();
        }
    }
    /**
     * Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
     * to. The default value is true.
     *
     * @return true if this is a visible configuration.
     */
    boolean isVisible();
    /**
     * Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
     * the project it belongs to. The default value is true.
     *
     * @param visible true if this is a visible configuration
     * @return this configuration
     */
    Configuration setVisible(boolean visible);
    /**
     * Returns the configurations which this configuration extends from. The artifacts of the super
     * configurations are also available in this configuration.
     *
     * @return The super configurations. Returns an empty set when this configuration does not extend any others.
     */
    Set<Configuration> getExtendsFrom();
    /**
     * Sets the configurations which this configuration extends from.
     *
     * @param superConfigs The super configurations. Should not be null.
     * @return this configuration
     */
    Configuration setExtendsFrom(Set<Configuration> superConfigs);
    /**
     * Adds the given configurations to the set of configurations which this configuration extends from.
     *
     * @param superConfigs The super configurations.
     * @return this configuration
     */
    Configuration extendsFrom(Configuration... superConfigs);
    /**
     * Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
     * direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
     * dependencies. The default value is true.
     *
     * @return true if this is a transitive configuration, false otherwise.
     */
    boolean isTransitive();
    /**
     * Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
     * closure of its dependencies and their dependencies. The default value is true.
     *
     * @param t true if this is a transitive configuration.
     * @return this configuration
     */
    Configuration setTransitive(boolean t);
    /**
     * Returns the description for this configuration.
     *
     * @return the description. May be null.
     */
    String getDescription();
    /**
     * Sets the description for this configuration.
     *
     * @param description the description. May be null
     * @return this configuration
     */
    Configuration setDescription(String description);
    /**
     * Gets an ordered set including this configuration and all superconfigurations
     * recursively.
     * @return the set of all configurations
     */
    Set<Configuration> getHierarchy();
    /**
     * Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
     * the resulting set of files.
     *
     * @return The files of this configuration.
     */
    Set<File> resolve();
    /**
     * Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
     * {@link #files(org.gradle.api.specs.Spec)}.
     *
     * @param dependencySpecClosure The closure describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
     * @return The files of a subset of dependencies of this configuration.
     */
    Set<File> files(Closure dependencySpecClosure);
    /**
     * Resolves this configuration. This locates and downloads the files which make up this configuration.
     * But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
     * is returned.
     *
     * @param dependencySpec The spec describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
     * @return The files of a subset of dependencies of this configuration.
     */
    Set<File> files(Spec<Dependency> dependencySpec);
    /**
     * Resolves this configuration. This locates and downloads the files which make up this configuration.
     * But only the resulting set of files belonging to the specified dependencies
     * is returned.
     *
     * @param dependencies The dependencies to be resolved
     * @return The files of a subset of dependencies of this configuration.
     */
    Set<File> files(Dependency... dependencies);
    /**
     * Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
     * This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
     * of dependencies specified by the dependencySpec is contained in the FileCollection.
     *
     * @param dependencySpec The spec describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
     * @return The FileCollection with a subset of dependencies of this configuration.
     */
    FileCollection fileCollection(Spec<Dependency> dependencySpec);
    /**
     * Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
     * {@link #fileCollection(org.gradle.api.specs.Spec)}.
     *
     * @param dependencySpecClosure The closure describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
     * @return The FileCollection with a subset of dependencies of this configuration.
     */
    FileCollection fileCollection(Closure dependencySpecClosure);
    /**
     * Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
     * This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the specified
     * dependencies is contained in the FileCollection.
     *
     * @param dependencies The dependencies for which the FileCollection should contain the files.
     * @return The FileCollection with a subset of dependencies of this configuration.
     */
    FileCollection fileCollection(Dependency... dependencies);
    /**
     * Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
     * a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
     *
     * @return The ResolvedConfiguration object
     */
    ResolvedConfiguration getResolvedConfiguration();
    /**
     * Returns the name of the task that uploads the artifacts of this configuration to repositories
     * declared by the user.
     *
     * @see org.gradle.api.tasks.Upload
     */
    String getUploadTaskName();
    /**
     * Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
     * (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
     *
     * @return a TaskDependency object
     */
    TaskDependency getBuildDependencies();
    /**
     * Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
     * dependencies related to this configuration or one of its super configurations. These other projects may be
     * projects this configuration depends on or projects with a similarly named configuration that depend on this one
     * based on the useDependedOn argument.
     *
     * @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
     * from configurations with the same name that depend on this one.
     * @param taskName name of task to depend on
     * @return the populated TaskDependency object
     */
    TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
    /**
     * Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
     * belonging to this configuration or to one of its super configurations.
     *
     * @return a task dependency object
     * @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
     */
    @Deprecated
    TaskDependency getBuildArtifacts();
    /**
     * Gets the set of dependencies directly contained in this configuration
     * (ignoring superconfigurations).
     *
     * @return the set of dependencies
     */
    DependencySet getDependencies();
    /**
     * <p>Gets the complete set of dependencies including those contributed by
     * superconfigurations.</p>
     *
     * @return the (read-only) set of dependencies
     */
    DependencySet getAllDependencies();
    /**
     * <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
     *
     * <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
     *
     * @param type the dependency type
     * @param <T> the dependency type
     * @return The (read-only) set.
     * @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
     */
    @Deprecated
    <T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
    /**
     * Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
     *
     * <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
     *
     * @param type the dependency type
     * @param <T> the dependency type
     * @return The (read-only) set.
     * @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
     */
    @Deprecated
    <T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
    /**
     * Adds a dependency to this configuration.
     *
     * @param dependency The dependency to be added.
     * @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
     */
    @Deprecated
    void addDependency(Dependency dependency);
    /**
     * Returns the artifacts of this configuration excluding the artifacts of extended configurations.
     *
     * @return The set.
     */
    PublishArtifactSet getArtifacts();
    /**
     * Returns the artifacts of this configuration including the artifacts of extended configurations.
     *
     * @return The (read-only) set.
     */
    PublishArtifactSet getAllArtifacts();
    /**
     * Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
     * configurations.
     *
     * @return the artifact files.
     * @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
     */
    @Deprecated
    FileCollection getAllArtifactFiles();
    /**
     * Returns the exclude rules applied for resolving any dependency of this configuration.
     *
     * @see #exclude(java.util.Map)
     */
    Set<ExcludeRule> getExcludeRules();
    /**
     * Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
     * You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
     *
     * @param excludeProperties the properties to define the exclude rule.
     * @return this
     */
    Configuration exclude(Map<String, String> excludeProperties);
    /**
     * Returns all the configurations belonging to the same configuration container as this
     * configuration (including this configuration).
     */
    Set<Configuration> getAll();
    /**
     * Adds an artifact to be published to this configuration.
     *
     * @param artifact The artifact.
     * @return this
     * @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
     */
    @Deprecated
    Configuration addArtifact(PublishArtifact artifact);
    /**
     * Removes an artifact from the artifacts to be published to this configuration.
     *
     * @param artifact The artifact.
     * @return this
     * @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
     */
    @Deprecated
    Configuration removeArtifact(PublishArtifact artifact);
    /**
     * Returns the incoming dependencies of this configuration.
     *
     * @return The incoming dependencies of this configuration. Never null.
     */
    ResolvableDependencies getIncoming();
    /**
     * Creates a copy of this configuration that only contains the dependencies directly in this configuration
     * (without contributions from superconfigurations). The new configuration will be in the
     * UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
     * {@link #getHierarchy()} for the copy will not include any superconfigurations.
     * @return copy of this configuration
     */
    Configuration copy();
    /**
     * Creates a copy of this configuration that contains the dependencies directly in this configuration
     * and those derived from superconfigurations. The new configuration will be in the
     * UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
     * {@link #getHierarchy()} for the copy will not include any superconfigurations.
     * @return copy of this configuration
     */
    Configuration copyRecursive();
    /**
     * Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()}) but filtering
     * the dependencies using the specified dependency spec.
     *
     * @param dependencySpec filtering requirements
     * @return copy of this configuration
     */
    Configuration copy(Spec<Dependency> dependencySpec);
    /**
     * Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
     * but filtering the dependencies using the dependencySpec.
     *
     * @param dependencySpec filtering requirements
     * @return copy of this configuration
     */
    Configuration copyRecursive(Spec<Dependency> dependencySpec);
    /**
     * Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}.
     *
     * @param dependencySpec filtering requirements
     * @return copy of this configuration
     */
    Configuration copy(Closure dependencySpec);
    /**
     * Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}.
     *
     * @param dependencySpec filtering requirements
     * @return copy of this configuration
     */
    Configuration copyRecursive(Closure dependencySpec);
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
VersionConflictStrategy getVersionConflictStrategy();
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
ResolutionStrategy getResolution();
/**
* The states a configuration can be into. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visibile outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets a ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that upload the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
Left
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the resolution strategy used by this configuration.
*
* @return strategy
*/
ResolutionStrategy getResolutionStrategy();
/**
* The states a configuration can be in. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configurations. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configurations which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the set of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependencies to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependedOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
ResolutionStrategy getResolutionStrategy();
/**
* The states a configuration can be in. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependencies to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependedOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
Right
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
*
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
@Deprecated
ResolutionStrategy getResolution();
/**
* The states a configuration can be in. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configurations which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependencies to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependedOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
*
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
@Deprecated
ResolutionStrategy getResolution();
/**
* The states a configuration can be in. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configurations which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependencies to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependedOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
 * Creates a copy of this configuration that only contains the dependencies directly in this configuration
 * (without contributions from superconfigurations). The new configuration will be in the
 * UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
 * {@link #getHierarchy()} for the copy will not include any superconfigurations.
 * @return copy of this configuration
 */
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
MergeMethods
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
ResolutionStrategy getResolutionStrategy()
/**
* The states a configuration can be into. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED , RESOLVED , RESOLVED_WITH_FAILURES}
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visibile outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets a ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that upload the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuation will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
ResolutionStrategy getResolutionStrategy()
/**
 * The states a configuration can be in. A configuration is only mutable as long as it is
 * in the unresolved state.
 */
enum State { UNRESOLVED , RESOLVED , RESOLVED_WITH_FAILURES}
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
 * Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
 * the project it belongs to. The default value is true.
 *
 * @param visible true if this is a visible configuration
 * @return this configuration
 */
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets a ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependedOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()}) but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
// KeepBothMethods: merge-strategy output below (keeps both versions of conflicting methods)
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
ResolutionStrategy getResolutionStrategy();
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
@Deprecated
ResolutionStrategy getResolution();
/**
* The states a configuration can be into. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State {
UNRESOLVED(), RESOLVED(), RESOLVED_WITH_FAILURES()
}
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visibile outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets a ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that upload the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuation will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
ResolutionStrategy getResolutionStrategy();
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
@Deprecated
ResolutionStrategy getResolution();
/**
* The states a configuration can be into. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State {
UNRESOLVED(), RESOLVED(), RESOLVED_WITH_FAILURES()
}
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visibile outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets a ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that upload the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
Safe
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
<<<<<<< MINE
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
=======
>>>>>>> YOURS
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
<<<<<<< MINE
@Deprecated
ResolutionStrategy getResolution();
=======
ResolutionStrategy getResolutionStrategy();
>>>>>>> YOURS
enum State { UNRESOLVED , RESOLVED , RESOLVED_WITH_FAILURES}
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the ordered set of all configurations in the hierarchy
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependencies to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @return the name of the upload task
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
<<<<<<< MINE
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
=======
>>>>>>> YOURS
/**
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
<<<<<<< MINE
@Deprecated
ResolutionStrategy getResolution();
=======
ResolutionStrategy getResolutionStrategy();
>>>>>>> YOURS
enum State { UNRESOLVED , RESOLVED , RESOLVED_WITH_FAILURES}
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visibile outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets a ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that upload the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
Unstructured
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
<<<<<<< MINE
=======
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
*
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
>>>>>>> YOURS
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
<<<<<<< MINE
ResolutionStrategy getResolutionStrategy();
=======
@Deprecated
ResolutionStrategy getResolution();
>>>>>>> YOURS
/**
* The states a configuration can be into. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that uploads the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.artifacts;
import groovy.lang.Closure;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.file.FileCollection;
import org.gradle.api.specs.Spec;
import org.gradle.api.tasks.TaskDependency;
import java.io.File;
import java.util.Map;
import java.util.Set;
/**
* <p>A {@code Configuration} represents a group of artifacts and their dependencies.</p>
*/
public interface Configuration extends FileCollection {
/**
<<<<<<< MINE
=======
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
*
* Returns the version conflict strategy used by this configuration
*
* @return strategy
*/
@Deprecated
VersionConflictStrategy getVersionConflictStrategy();
/**
* <strong>experimental</strong>
* This part of the api will change for sure!
* You can use it if you like but this part of the api will change without notice.
* <p>
>>>>>>> YOURS
* Returns the resolution strategy used by this configuration
*
* @return strategy
*/
<<<<<<< MINE
ResolutionStrategy getResolutionStrategy();
=======
@Deprecated
ResolutionStrategy getResolution();
>>>>>>> YOURS
/**
* The states a configuration can be into. A configuration is only mutable as long as it is
* in the unresolved state.
*/
enum State { UNRESOLVED, RESOLVED, RESOLVED_WITH_FAILURES }
/**
* Returns the state of the configuration.
*
* @see org.gradle.api.artifacts.Configuration.State
*/
State getState();
/**
* Returns the name of this configuration.
*
* @return The configuration name, never null.
*/
String getName();
/**
* A {@link org.gradle.api.Namer} namer for configurations that returns {@link #getName()}.
*/
static class Namer implements org.gradle.api.Namer<Configuration> {
public String determineName(Configuration c) {
return c.getName();
}
}
/**
* Returns true if this is a visible configuration. A visible configuration is usable outside the project it belongs
* to. The default value is true.
*
* @return true if this is a visible configuration.
*/
boolean isVisible();
/**
* Sets the visibility of this configuration. When visible is set to true, this configuration is visible outside
* the project it belongs to. The default value is true.
*
* @param visible true if this is a visible configuration
* @return this configuration
*/
Configuration setVisible(boolean visible);
/**
* Returns the names of the configurations which this configuration extends from. The artifacts of the super
* configurations are also available in this configuration.
*
* @return The super configurations. Returns an empty set when this configuration does not extend any others.
*/
Set<Configuration> getExtendsFrom();
/**
* Sets the configurations which this configuration extends from.
*
* @param superConfigs The super configuration. Should not be null.
* @return this configuration
*/
Configuration setExtendsFrom(Set<Configuration> superConfigs);
/**
* Adds the given configurations to the set of configuration which this configuration extends from.
*
* @param superConfigs The super configurations.
* @return this configuration
*/
Configuration extendsFrom(Configuration... superConfigs);
/**
* Returns the transitivity of this configuration. A transitive configuration contains the transitive closure of its
* direct dependencies, and all their dependencies. An intransitive configuration contains only the direct
* dependencies. The default value is true.
*
* @return true if this is a transitive configuration, false otherwise.
*/
boolean isTransitive();
/**
* Sets the transitivity of this configuration. When set to true, this configuration will contain the transitive
* closure of its dependencies and their dependencies. The default value is true.
*
* @param t true if this is a transitive configuration.
* @return this configuration
*/
Configuration setTransitive(boolean t);
/**
* Returns the description for this configuration.
*
* @return the description. May be null.
*/
String getDescription();
/**
* Sets the description for this configuration.
*
* @param description the description. May be null
* @return this configuration
*/
Configuration setDescription(String description);
/**
* Gets an ordered set including this configuration and all superconfigurations
* recursively.
* @return the list of all configurations
*/
Set<Configuration> getHierarchy();
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* the resulting set of files.
*
* @return The files of this configuration.
*/
Set<File> resolve();
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #files(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Closure dependencySpecClosure);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the subset of dependencies specified by the dependencySpec
* is returned.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Spec<? super Dependency> dependencySpec);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration.
* But only the resulting set of files belonging to the specified dependencies
* is returned.
*
* @param dependencies The dependences to be resolved
* @return The files of a subset of dependencies of this configuration.
*/
Set<File> files(Dependency... dependencies);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to the subset
* of dependencies specified by the dependencySpec is contained in the FileCollection.
*
* @param dependencySpec The spec describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as
* {@link #fileCollection(org.gradle.api.specs.Spec)}.
*
* @param dependencySpecClosure The closure describing a filter applied to the all the dependencies of this configuration (including dependencies from extended configurations).
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Closure dependencySpecClosure);
/**
* Resolves this configuration lazily. The resolve happens when the elements of the returned FileCollection get accessed the first time.
* This locates and downloads the files which make up this configuration. Only the resulting set of files belonging to specified
* dependencies is contained in the FileCollection.
*
* @param dependencies The dependencies for which the FileCollection should contain the files.
* @return The FileCollection with a subset of dependencies of this configuration.
*/
FileCollection fileCollection(Dependency... dependencies);
/**
* Resolves this configuration. This locates and downloads the files which make up this configuration, and returns
* a ResolvedConfiguration that may be used to determine information about the resolve (including errors).
*
* @return The ResolvedConfiguration object
*/
ResolvedConfiguration getResolvedConfiguration();
/**
* Returns the name of the task that upload the artifacts of this configuration to repositories
* declared by the user.
*
* @see org.gradle.api.tasks.Upload
*/
String getUploadTaskName();
/**
* Returns a {@code TaskDependency} object containing all required dependencies to build the internal dependencies
* (e.g. project dependencies) belonging to this configuration or to one of its super configurations.
*
* @return a TaskDependency object
*/
TaskDependency getBuildDependencies();
/**
* Returns a TaskDependency object containing dependencies on all tasks with the specified name from project
* dependencies related to this configuration or one of its super configurations. These other projects may be
* projects this configuration depends on or projects with a similarly named configuration that depend on this one
* based on the useDependOn argument.
*
* @param useDependedOn if true, add tasks from project dependencies in this configuration, otherwise use projects
* from configurations with the same name that depend on this one.
* @param taskName name of task to depend on
* @return the populated TaskDependency object
*/
TaskDependency getTaskDependencyFromProjectDependency(boolean useDependedOn, final String taskName);
/**
* Returns a {@code TaskDependency} object containing all required tasks to build the artifacts
* belonging to this configuration or to one of its super configurations.
*
* @return a task dependency object
* @deprecated Use {@link PublishArtifactSet#getBuildDependencies()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
TaskDependency getBuildArtifacts();
/**
* Gets the set of dependencies directly contained in this configuration
* (ignoring superconfigurations).
*
* @return the set of dependencies
*/
DependencySet getDependencies();
/**
* <p>Gets the complete set of dependencies including those contributed by
* superconfigurations.</p>
*
* @return the (read-only) set of dependencies
*/
DependencySet getAllDependencies();
/**
* <p>Gets the set of dependencies of type T directly contained in this configuration (ignoring superconfigurations).</p>
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getDependencies(Class<T> type);
/**
* Gets the set of dependencies of type T for this configuration including those contributed by superconfigurations.
*
* <p>The returned set is live, in that any future dependencies added to this configuration that match the type will appear in the returned set.</p>
*
* @param type the dependency type
* @param <T> the dependency type
* @return The (read-only) set.
* @deprecated Use {@link DependencySet#withType(Class)} on {@link #getAllDependencies()} instead.
*/
@Deprecated
<T extends Dependency> DomainObjectSet<T> getAllDependencies(Class<T> type);
/**
* Adds a dependency to this configuration.
*
* @param dependency The dependency to be added.
* @deprecated Use {@link DependencySet#add(Object)} on {@link #getDependencies()} instead.
*/
@Deprecated
void addDependency(Dependency dependency);
/**
* Returns the artifacts of this configuration excluding the artifacts of extended configurations.
*
* @return The set.
*/
PublishArtifactSet getArtifacts();
/**
* Returns the artifacts of this configuration including the artifacts of extended configurations.
*
* @return The (read-only) set.
*/
PublishArtifactSet getAllArtifacts();
/**
* Returns the artifacts of this configuration as a {@link FileCollection}, including artifacts of extended
* configurations.
*
* @return the artifact files.
* @deprecated Use {@link PublishArtifactSet#getFiles()} on {@link #getAllArtifacts()} instead.
*/
@Deprecated
FileCollection getAllArtifactFiles();
/**
* Returns the exclude rules applied for resolving any dependency of this configuration.
*
* @see #exclude(java.util.Map)
*/
Set<ExcludeRule> getExcludeRules();
/**
* Adds an exclude rule to exclude transitive dependencies for all dependencies of this configuration.
* You can also add exclude rules per-dependency. See {@link ModuleDependency#exclude(java.util.Map)}.
*
* @param excludeProperties the properties to define the exclude rule.
* @return this
*/
Configuration exclude(Map<String, String> excludeProperties);
/**
* Returns all the configurations belonging to the same configuration container as this
* configuration (including this configuration).
*/
Set<Configuration> getAll();
/**
* Adds an artifact to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#add(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration addArtifact(PublishArtifact artifact);
/**
* Removes an artifact from the artifacts to be published to this configuration.
*
* @param artifact The artifact.
* @return this
* @deprecated Use {@link PublishArtifactSet#remove(Object)} on {@link #getArtifacts()} instead.
*/
@Deprecated
Configuration removeArtifact(PublishArtifact artifact);
/**
* Returns the incoming dependencies of this configuration.
*
* @return The incoming dependencies of this configuration. Never null.
*/
ResolvableDependencies getIncoming();
/**
* Creates a copy of this configuration that only contains the dependencies directly in this configuration
* (without contributions from superconfigurations). The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copy();
/**
* Creates a copy of this configuration that contains the dependencies directly in this configuration
* and those derived from superconfigurations. The new configuration will be in the
* UNRESOLVED state, but will retain all other attributes of this configuration except superconfigurations.
* {@link #getHierarchy()} for the copy will not include any superconfigurations.
* @return copy of this configuration
*/
Configuration copyRecursive();
/**
* Creates a copy of this configuration ignoring superconfigurations (see {@link #copy()} but filtering
* the dependencies using the specified dependency spec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Spec<? super Dependency> dependencySpec);
/**
* Creates a copy of this configuration with dependencies from superconfigurations (see {@link #copyRecursive()})
* but filtering the dependencies using the dependencySpec.
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Spec<? super Dependency> dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copy(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copy(Closure dependencySpec);
/**
* Takes a closure which gets coerced into a Spec. Behaves otherwise in the same way as {@link #copyRecursive(org.gradle.api.specs.Spec)}
*
* @param dependencySpec filtering requirements
* @return copy of this configuration
*/
Configuration copyRecursive(Closure dependencySpec);
}
Diff Result
No diff
Case 6 - gradle.rev_cb507_bf1e6.DaemonClientServices.java
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout)
Left modified body. Also added a private overloaded constructor.
Right changed signature parameter type: Integer → int
Unstructured reported conflict
Safe reported conflict
MergeMethods did not report conflict
KeepBothMethods kept both versions of constructor
Base
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final Integer idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final Integer idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
Left
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final Integer idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final Integer idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
Right
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
MergeMethods
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
KeepBothMethods
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
Safe
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
<<<<<<< MINE
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
=======
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
>>>>>>> YOURS
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
<<<<<<< MINE
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = new DaemonRegistryServices(daemonBaseDir);
add(registryServices);
=======
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
>>>>>>> YOURS
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
Unstructured
/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
<<<<<<< MINE
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
=======
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
>>>>>>> YOURS
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}/*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.launcher.daemon.client;
import org.gradle.api.internal.project.ServiceRegistry;
import org.gradle.launcher.daemon.context.DaemonContextBuilder;
import org.gradle.launcher.daemon.registry.DaemonDir;
import org.gradle.launcher.daemon.registry.DaemonRegistry;
import org.gradle.launcher.daemon.registry.DaemonRegistryServices;
import org.gradle.launcher.daemon.server.DaemonIdleTimeout;
import java.io.File;
/**
* Takes care of instantiating and wiring together the services required by the daemon client.
*/
public class DaemonClientServices extends DaemonClientServicesSupport {
private final int idleTimeout;
private final ServiceRegistry registryServices;
public DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices daemonBaseDir) {
this(loggingServices, daemonBaseDir, DaemonIdleTimeout.DEFAULT_IDLE_TIMEOUT);
}
<<<<<<< MINE
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, Integer idleTimeout) {
this(loggingServices, new DaemonRegistryServices(daemonBaseDir), idleTimeout);
}
private DaemonClientServices(ServiceRegistry loggingServices, DaemonRegistryServices registryServices, Integer idleTimeout) {
=======
public DaemonClientServices(ServiceRegistry loggingServices, File daemonBaseDir, int idleTimeout) {
>>>>>>> YOURS
super(loggingServices);
this.idleTimeout = idleTimeout;
this.registryServices = registryServices;
add(registryServices);
}
// here to satisfy DaemonClientServicesSupport contract
protected DaemonRegistry createDaemonRegistry() {
return registryServices.get(DaemonRegistry.class);
}
public Runnable makeDaemonStarter() {
return new DaemonStarter(registryServices.get(DaemonDir.class), idleTimeout);
}
protected void configureDaemonContextBuilder(DaemonContextBuilder builder) {
builder.setDaemonRegistryDir(registryServices.get(DaemonDir.class).getBaseDir());
}
}
Diff Result
No diff
Case 7 - infinispan.rev_a2154_744e9.AbstractPerEntryLockContainer.java
Lock acquireLock
Left added parameter, changed return type and modified body
Right modified body
Unstructured reported conflict on body
Safe kept both versions
MergeMethods reported conflict on body
KeepBothMethods kept both versions
Base
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import static org.infinispan.util.Util.safeRelease;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer implements LockContainer {
protected final ConcurrentMap<Object, Lock> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, Lock>(16, .75f, concurrencyLevel);
}
protected abstract Lock newLock();
public final Lock getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
Lock lock = locks.get(key);
if (lock == null) lock = newLock();
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
safeRelease(lock);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(Object key) {
Lock l = locks.remove(key);
if (l != null) l.unlock();
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import static org.infinispan.util.Util.safeRelease;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer implements LockContainer {
protected final ConcurrentMap<Object, Lock> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, Lock>(16, .75f, concurrencyLevel);
}
protected abstract Lock newLock();
public final Lock getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
Lock lock = locks.get(key);
if (lock == null) lock = newLock();
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
safeRelease(lock);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(Object key) {
Lock l = locks.remove(key);
if (l != null) l.unlock();
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
Left
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null) lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
safeRelease(lock, ctx);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null) unlock(l, ctx);
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null) lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
safeRelease(lock, ctx);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null) unlock(l, ctx);
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
Right
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import static org.infinispan.util.Util.safeRelease;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer implements LockContainer {
protected final ConcurrentMap<Object, Lock> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, Lock>(16, .75f, concurrencyLevel);
}
protected abstract Lock newLock();
public final Lock getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
Lock lock = locks.get(key);
if (lock == null) lock = newLock();
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(Object key) {
Lock l = locks.remove(key);
if (l != null) l.unlock();
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import static org.infinispan.util.Util.safeRelease;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer implements LockContainer {
protected final ConcurrentMap<Object, Lock> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, Lock>(16, .75f, concurrencyLevel);
}
protected abstract Lock newLock();
public final Lock getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
Lock lock = locks.get(key);
if (lock == null) lock = newLock();
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(Object key) {
Lock l = locks.remove(key);
if (l != null) l.unlock();
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
MergeMethods
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer <L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null) lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null) unlock(l, ctx);
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
<<<<<<< MINE
safeRelease(lock, ctx);
=======
>>>>>>> YOURS
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer <L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null) lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null) unlock(l, ctx);
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
<<<<<<< MINE
safeRelease(lock, ctx);
=======
>>>>>>> YOURS
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
KeepBothMethods
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null)
lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null)
lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
safeRelease(lock, ctx);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null)
unlock(l, ctx);
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null)
lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null)
lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
safeRelease(lock, ctx);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null)
unlock(l, ctx);
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
Safe
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null)
lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null)
lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
safeRelease(lock, ctx);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null)
unlock(l, ctx);
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null)
lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null)
lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
safeRelease(lock, ctx);
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null)
unlock(l, ctx);
}
public Lock acquireLock(Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
Lock lock = getLock(key);
boolean locked = false;
try {
locked = lock.tryLock(timeout, unit);
} catch (InterruptedException ie) {
safeRelease(lock);
throw ie;
} catch (Throwable th) {
locked = false;
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
Lock existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
Unstructured
/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null) lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
<<<<<<< MINE
safeRelease(lock, ctx);
locked = false;
=======
locked = false;
>>>>>>> YOURS
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null) unlock(l, ctx);
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}/*
* JBoss, Home of Professional Open Source
* Copyright 2009 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.util.concurrent.locks.containers;
import org.infinispan.context.InvocationContext;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
/**
* An abstract lock container that creates and maintains a new lock per entry
*
* @author Manik Surtani
* @since 4.0
*/
public abstract class AbstractPerEntryLockContainer<L extends Lock> extends AbstractLockContainer<L> {
protected final ConcurrentMap<Object, L> locks;
protected AbstractPerEntryLockContainer(int concurrencyLevel) {
locks = new ConcurrentHashMap<Object, L>(16, .75f, concurrencyLevel);
}
protected abstract L newLock();
public final L getLock(Object key) {
// this is an optimisation. It is not foolproof as we may still be creating new locks unnecessarily (thrown away
// when we do a putIfAbsent) but it minimises the chances somewhat, for the cost of an extra CHM get.
L lock = locks.get(key);
if (lock == null) lock = newLock();
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null) lock = existingLock;
return lock;
}
public int getNumLocksHeld() {
return locks.size();
}
public int size() {
return locks.size();
}
public L acquireLock(InvocationContext ctx, Object key, long timeout, TimeUnit unit) throws InterruptedException {
while (true) {
L lock = getLock(key);
boolean locked;
try {
locked = tryLock(lock, timeout, unit, ctx);
} catch (InterruptedException ie) {
safeRelease(lock, ctx);
throw ie;
} catch (Throwable th) {
<<<<<<< MINE
safeRelease(lock, ctx);
locked = false;
=======
locked = false;
>>>>>>> YOURS
}
if (locked) {
// lock acquired. Now check if it is the *correct* lock!
L existingLock = locks.putIfAbsent(key, lock);
if (existingLock != null && existingLock != lock) {
// we have the wrong lock! Unlock and retry.
safeRelease(lock, ctx);
} else {
// we got the right lock.
return lock;
}
} else {
// we couldn't acquire the lock within the timeout period
return null;
}
}
}
public void releaseLock(InvocationContext ctx, Object key) {
L l = locks.remove(key);
if (l != null) unlock(l, ctx);
}
public int getLockId(Object key) {
return System.identityHashCode(getLock(key));
}
}
Diff Result
No diff
Case 8 - jedis.rev_155af_88e66.BinaryClient.java
public void zrange(final byte[] key, final int start, final int end)
Left modified signature parameter types: int → long
Right did not modify method
Unstructured reported no conflict
Safe reported conflict with zcount
MergeMethods reported conflict on body
KeepBothMethods did not report a conflict
public void zcount(final byte[] key, final double min, final double max)
Left modified signature parameter types: double → byte[]
Right modified body and added overloaded versions of method
One overloaded signature had parameters (final byte[] key, final byte min[], final byte max[])
Unstructured reported conflict between close versions
Safe reported a conflict between one version and zrange. It also kept two versions with equivalent signatures: a false negative (FN), as the result does not compile.
public void zcount(final byte[] key, final byte[] min, final byte[] max)
public void zcount(final byte[] key, final byte min[], final byte max[])
MergeMethods kept two versions with equivalent signatures: a false negative (FN), as the result does not compile.
public void zcount(final byte[] key, final byte[] min, final byte[] max)
public void zcount(final byte[] key, final byte min[], final byte max[])
KeepBothMethods kept two versions with equivalent signatures and the same body: a false negative (FN), as the result does not compile.
public void zcount(final byte[] key, final byte[] min, final byte[] max)
public void zcount(final byte[] key, final byte min[], final byte max[])
Base
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
for (final byte[] field : hash.keySet()) {
params.add(field);
params.add(hash.get(field));
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[] field) {
sendCommand(HDEL, key, field);
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[] string) {
sendCommand(RPUSH, key, string);
}
public void lpush(final byte[] key, final byte[] string) {
sendCommand(LPUSH, key, string);
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[] member) {
sendCommand(SADD, key, member);
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[] member) {
sendCommand(SREM, key, member);
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
/**
 * Sends SUNIONSTORE: store the union of the given sets into {@code dstkey}.
 * Wire layout is [dstkey, key1, key2, ...].
 */
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
    // final, matching the params declaration style of sinterstore above
    final byte[][] params = new byte[keys.length + 1][];
    params[0] = dstkey;
    System.arraycopy(keys, 0, params, 1, keys.length);
    sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
/**
 * Sends SDIFFSTORE: store the difference of the given sets into
 * {@code dstkey}. Wire layout is [dstkey, key1, key2, ...].
 */
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
    // final, matching the params declaration style of sinterstore above
    final byte[][] params = new byte[keys.length + 1][];
    params[0] = dstkey;
    System.arraycopy(keys, 0, params, 1, keys.length);
    sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
public void zrange(final byte[] key, final int start, final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[] member) {
sendCommand(ZREM, key, member);
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final int start, final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final double min, final double max) {
sendCommand(ZCOUNT, key, toByteArray(min), toByteArray(max));
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max));
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min));
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final int start, final int end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final double start,
final double end) {
sendCommand(ZREMRANGEBYSCORE, key, toByteArray(start), toByteArray(end));
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
/**
 * Sends ZINTERSTORE with default weights/aggregate: intersect the given
 * sorted sets and store the result in {@code dstkey}.
 * Wire layout is [dstkey, numkeys, set1, set2, ...].
 */
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
    final byte[][] params = new byte[sets.length + 2][];
    params[0] = dstkey;
    // Unqualified toByteArray (already statically imported) for
    // consistency with zunionstore and the rest of this class.
    params[1] = toByteArray(sets.length);
    System.arraycopy(sets, 0, params, 2, sets.length);
    sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final String pattern) {
sendCommand(CONFIG, Keyword.GET.name(), pattern);
}
public void configSet(final String parameter, final String value) {
sendCommand(CONFIG, Keyword.SET.name(), parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[] string) {
sendCommand(LPUSHX, key, string);
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[] string) {
sendCommand(RPUSHX, key, string);
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
/**
 * Closes the underlying connection and resets the tracked database
 * index, so a subsequent connect() does not re-select a stale DB.
 */
@Override
public void disconnect() {
    db = 0;
    super.disconnect();
}
}
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
/**
 * Sends HMSET: sets multiple hash fields on {@code key} in a single
 * command. The map is flattened into [key, field1, value1, field2, ...].
 */
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    final List<byte[]> params = new ArrayList<byte[]>();
    params.add(key);
    // Iterate entrySet() instead of keySet()+get(): one lookup per
    // field instead of two.
    for (final Map.Entry<byte[], byte[]> entry : hash.entrySet()) {
        params.add(entry.getKey());
        params.add(entry.getValue());
    }
    sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[] field) {
sendCommand(HDEL, key, field);
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[] string) {
sendCommand(RPUSH, key, string);
}
public void lpush(final byte[] key, final byte[] string) {
sendCommand(LPUSH, key, string);
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[] member) {
sendCommand(SADD, key, member);
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[] member) {
sendCommand(SREM, key, member);
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
/**
 * Sends SUNIONSTORE: store the union of the given sets into {@code dstkey}.
 * Wire layout is [dstkey, key1, key2, ...].
 */
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
    // final, matching the params declaration style of sinterstore above
    final byte[][] params = new byte[keys.length + 1][];
    params[0] = dstkey;
    System.arraycopy(keys, 0, params, 1, keys.length);
    sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
/**
 * Sends SDIFFSTORE: store the difference of the given sets into
 * {@code dstkey}. Wire layout is [dstkey, key1, key2, ...].
 */
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
    // final, matching the params declaration style of sinterstore above
    final byte[][] params = new byte[keys.length + 1][];
    params[0] = dstkey;
    System.arraycopy(keys, 0, params, 1, keys.length);
    sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
public void zrange(final byte[] key, final int start, final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[] member) {
sendCommand(ZREM, key, member);
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final int start, final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final double min, final double max) {
sendCommand(ZCOUNT, key, toByteArray(min), toByteArray(max));
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max));
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min));
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, toByteArray(min), toByteArray(max),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, toByteArray(max), toByteArray(min),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final int start, final int end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final double start,
final double end) {
sendCommand(ZREMRANGEBYSCORE, key, toByteArray(start), toByteArray(end));
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
/**
 * Sends ZINTERSTORE with default weights/aggregate: intersect the given
 * sorted sets and store the result in {@code dstkey}.
 * Wire layout is [dstkey, numkeys, set1, set2, ...].
 */
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
    final byte[][] params = new byte[sets.length + 2][];
    params[0] = dstkey;
    // Unqualified toByteArray (already statically imported) for
    // consistency with zunionstore and the rest of this class.
    params[1] = toByteArray(sets.length);
    System.arraycopy(sets, 0, params, 2, sets.length);
    sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final String pattern) {
sendCommand(CONFIG, Keyword.GET.name(), pattern);
}
public void configSet(final String parameter, final String value) {
sendCommand(CONFIG, Keyword.SET.name(), parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[] string) {
sendCommand(LPUSHX, key, string);
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[] string) {
sendCommand(RPUSHX, key, string);
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
/**
 * Closes the underlying connection and resets the tracked database
 * index, so a subsequent connect() does not re-select a stale DB.
 */
@Override
public void disconnect() {
    db = 0;
    super.disconnect();
}
}
Left
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Prepends {@code first} to {@code rest}, producing the flat argument
 * vector expected by variadic Redis commands (e.g. HDEL key field ...).
 *
 * @param first the leading argument, typically the key
 * @param rest  the remaining arguments
 * @return a new array of length {@code rest.length + 1}
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] result = new byte[rest.length + 1][];
    result[0] = first;
    // System.arraycopy, consistent with the other bulk-argument
    // builders in this class (sinterstore, sunionstore, ...).
    System.arraycopy(rest, 0, result, 1, rest.length);
    return result;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    // HMSET key field value [field value ...] — flatten the map into the
    // protocol argument array, key first. Size is known up front, so fill a
    // preallocated array instead of growing a list.
    final byte[][] args = new byte[hash.size() * 2 + 1][];
    int idx = 0;
    args[idx++] = key;
    for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
        args[idx++] = entry.getKey();
        args[idx++] = entry.getValue();
    }
    sendCommand(HMSET, args);
}
public void hmget(final byte[] key, final byte[]... fields) {
    // HMGET key field [field ...] — the key is prepended to the field list.
    // Use the shared joinParameters helper for consistency with hdel/sadd/srem
    // instead of duplicating the arraycopy logic inline.
    sendCommand(HMGET, joinParameters(key, fields));
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
    // SINTERSTORE destination key [key ...] — destination is prepended.
    // Reuse joinParameters for consistency with hdel/sadd/srem/zrem.
    sendCommand(SINTERSTORE, joinParameters(dstkey, keys));
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
    // SUNIONSTORE destination key [key ...] — destination is prepended.
    // Reuse joinParameters for consistency with hdel/sadd/srem/zrem.
    sendCommand(SUNIONSTORE, joinParameters(dstkey, keys));
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
    // SDIFFSTORE destination key [key ...] — destination is prepended.
    // Reuse joinParameters for consistency with hdel/sadd/srem/zrem.
    sendCommand(SDIFFSTORE, joinParameters(dstkey, keys));
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// ZADD key score member [score member ...] built from a score->member map.
// NOTE(review): the map is keyed by score (Double), so two members that share
// the same score cannot both be passed through this overload — confirm whether
// callers ever need that before relying on this API.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
// Presized: one slot for the key plus two per (score, member) pair.
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
// Protocol order is score first, then member.
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
    // SORT key [BY ...] [LIMIT ...] [GET ...] [ASC|DESC] [ALPHA] — the
    // modifiers come pre-encoded from SortingParams and follow the key.
    final List<byte[]> allArgs = new ArrayList<byte[]>();
    allArgs.add(key);
    for (final byte[] param : sortingParameters.getParams()) {
        allArgs.add(param);
    }
    sendCommand(SORT, allArgs.toArray(new byte[allArgs.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
    // BLPOP key [key ...] timeout — the timeout is the LAST argument.
    // Build the array directly; the size is known, so no intermediate List.
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    blpop(args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
    // BRPOP key [key ...] timeout — the timeout is the LAST argument.
    // Build the array directly; the size is known, so no intermediate List.
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    brpop(args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte[] start,
final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
        final byte[]... sets) {
    // ZUNIONSTORE destination numkeys key [key ...] [WEIGHTS ...] [AGGREGATE ...]
    final List<byte[]> arguments = new ArrayList<byte[]>();
    arguments.add(dstkey);
    arguments.add(Protocol.toByteArray(sets.length));
    for (int i = 0; i < sets.length; i++) {
        arguments.add(sets[i]);
    }
    // WEIGHTS/AGGREGATE modifiers come pre-encoded from ZParams.
    arguments.addAll(params.getParams());
    sendCommand(ZUNIONSTORE, arguments.toArray(new byte[arguments.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
        final byte[]... sets) {
    // ZINTERSTORE destination numkeys key [key ...] [WEIGHTS ...] [AGGREGATE ...]
    final List<byte[]> arguments = new ArrayList<byte[]>();
    arguments.add(dstkey);
    arguments.add(Protocol.toByteArray(sets.length));
    for (int i = 0; i < sets.length; i++) {
        arguments.add(sets[i]);
    }
    // WEIGHTS/AGGREGATE modifiers come pre-encoded from ZParams.
    arguments.addAll(params.getParams());
    sendCommand(ZINTERSTORE, arguments.toArray(new byte[arguments.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
// CONFIG RESETSTAT.
// NOTE(review): this passes Keyword.RESETSTAT.name() (a String) where the
// sibling configGet/configSet use Keyword.*.raw bytes; Redis parses command
// words case-insensitively so behavior should match — confirm the Keyword.raw
// encoding before normalizing.
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
private void sendEvalCommand(Command command, byte[] script,
        byte[] keyCount, byte[][] params) {
    // Argument layout shared by EVAL/EVALSHA: script (or sha1), numkeys,
    // then the key/arg list.
    final byte[][] allArgs = new byte[params.length + 2][];
    allArgs[0] = script;
    allArgs[1] = keyCount;
    System.arraycopy(params, 0, allArgs, 2, params.length);
    sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
    // Delegate to the byte[]-count overload, mirroring how
    // eval(byte[], int, byte[]...) delegates — keeps the two pairs consistent.
    evalsha(sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
public void scriptExists(byte[]... sha1) {
    // SCRIPT EXISTS sha1 [sha1 ...] — the EXISTS keyword is prepended.
    // Reuse joinParameters instead of duplicating the copy loop inline.
    sendCommand(SCRIPT, joinParameters(Keyword.EXISTS.raw, sha1));
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
// BITOP operation destkey srckey [srckey ...] — bitwise operation across the
// source keys, result stored in destKey.
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
// Map the public BitOP enum onto the protocol Keyword; AND is only a
// placeholder default — every enum value is covered below.
Keyword kw = Keyword.AND;
int len = srcKeys.length;
switch (op) {
case AND:
kw = Keyword.AND;
break;
case OR:
kw = Keyword.OR;
break;
case XOR:
kw = Keyword.XOR;
break;
case NOT:
kw = Keyword.NOT;
// NOT is unary: only the first source key is sent; any extras are
// silently dropped here rather than rejected.
len = Math.min(1, len);
break;
}
// Layout: operation keyword, destination key, then the source keys.
byte[][] bargs = new byte[len + 2][];
bargs[0] = kw.raw;
bargs[1] = destKey;
for (int i = 0; i < len; ++i) {
bargs[i + 2] = srcKeys[i];
}
sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
// Pivot position for LINSERT: insert the new element BEFORE or AFTER the
// pivot element in the list.
public enum LIST_POSITION {
BEFORE, AFTER;
// Raw protocol bytes of the enum name, precomputed once per constant.
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    // Prepend `first` (typically the key) to the vararg list `rest`.
    final byte[][] joined = new byte[rest.length + 1][];
    joined[0] = first;
    System.arraycopy(rest, 0, joined, 1, rest.length);
    return joined;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
// Connect only if not already connected; on a fresh connection, replay
// AUTH and SELECT so the session matches the configured password and
// database index.
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
// Consume the +OK status so the reply stream stays aligned.
getStatusCodeReply();
}
if (db > 0) {
// `db` is remembered by select(); re-select it after (re)connecting.
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
params.add(entry.getKey());
params.add(entry.getValue());
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
    // HMGET key field [field ...] — the key is prepended to the field list.
    // Use the shared joinParameters helper for consistency with hdel/sadd/srem
    // instead of duplicating the arraycopy logic inline.
    sendCommand(HMGET, joinParameters(key, fields));
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
    // SINTERSTORE destination key [key ...] — destination is prepended.
    // Reuse joinParameters for consistency with hdel/sadd/srem/zrem.
    sendCommand(SINTERSTORE, joinParameters(dstkey, keys));
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
    // SUNIONSTORE destination key [key ...] — destination is prepended.
    // Reuse joinParameters for consistency with hdel/sadd/srem/zrem.
    sendCommand(SUNIONSTORE, joinParameters(dstkey, keys));
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
    // SDIFFSTORE destination key [key ...] — destination is prepended.
    // Reuse joinParameters for consistency with hdel/sadd/srem/zrem.
    sendCommand(SDIFFSTORE, joinParameters(dstkey, keys));
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// ZADD key score member [score member ...] built from a score->member map.
// NOTE(review): the map is keyed by score (Double), so two members that share
// the same score cannot both be passed through this overload — confirm whether
// callers ever need that before relying on this API.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
// Presized: one slot for the key plus two per (score, member) pair.
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
// Protocol order is score first, then member.
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
    // BLPOP key [key ...] timeout — the timeout is the LAST argument.
    // Build the array directly; the size is known, so no intermediate List.
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    blpop(args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
    // BRPOP key [key ...] timeout — the timeout is the LAST argument.
    // Build the array directly; the size is known, so no intermediate List.
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    brpop(args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte[] start,
final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
// Sends an EVAL-family command with argument layout [script, keyCount, params...].
// Uses System.arraycopy for the trailing params, consistent with the other
// argument-assembly methods in this class (e.g. zunionstore, sinterstore).
private void sendEvalCommand(Command command, byte[] script,
        byte[] keyCount, byte[][] params) {
    final byte[][] allArgs = new byte[params.length + 2][];
    allArgs[0] = script;
    allArgs[1] = keyCount;
    System.arraycopy(params, 0, allArgs, 2, params.length);
    sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
// SCRIPT EXISTS sha1 [sha1 ...] — checks which scripts are cached on the server.
// Bulk-copies the hashes with System.arraycopy instead of an element-wise loop,
// matching the argument-assembly style used elsewhere in this class.
public void scriptExists(byte[]... sha1) {
    byte[][] args = new byte[sha1.length + 1][];
    args[0] = Keyword.EXISTS.raw;
    System.arraycopy(sha1, 0, args, 1, sha1.length);
    sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
// Issues BITOP <op> destKey srcKeys... . For NOT, Redis accepts exactly one
// source key, so the source list is truncated to at most one entry.
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
    // The original switch mapped each BitOP constant (AND/OR/XOR/NOT) to the
    // identically-named Keyword; valueOf(op.name()) performs the same mapping.
    final Keyword kw = Keyword.valueOf(op.name());
    int len = srcKeys.length;
    if (op == BitOP.NOT) {
        len = Math.min(1, len);
    }
    byte[][] bargs = new byte[len + 2][];
    bargs[0] = kw.raw;
    bargs[1] = destKey;
    System.arraycopy(srcKeys, 0, bargs, 2, len);
    sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
// SET with a condition flag (nxxx): NX = set only if the key does not exist,
// XX = set only if it does. The flag bytes are passed through verbatim.
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
// SET with a condition flag plus expiry: expx selects the unit (EX = seconds,
// PX = milliseconds) and time is the expiry in that unit.
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
// MIGRATE host port key destination-db timeout — transfers the key to another
// Redis instance, selecting destinationDb there; timeout is in milliseconds
// per the Redis command signature (NOTE(review): unit not visible here — confirm).
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
// HINCRBYFLOAT key field increment — increments a hash field by a double value.
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
Right
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
// Marker for LINSERT: insert BEFORE or AFTER the pivot element. Each constant
// caches its wire form (its own name encoded via SafeEncoder) in `raw` so it
// is computed once at enum initialization rather than per command.
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
public void setPassword(final String password) {
this.password = password;
}
// Opens the connection if not already connected, then restores session state:
// re-sends AUTH with the stored password (if any) and re-SELECTs the last
// non-zero db index. Each status reply is consumed immediately to keep the
// reply stream in sync with the commands sent.
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
// HMSET key field value [field value ...] — sets multiple hash fields at once.
// Iterates entrySet() rather than keySet()+get(field), avoiding a second map
// lookup per field, and presizes the argument list to its exact final size.
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    final List<byte[]> params = new ArrayList<byte[]>(hash.size() * 2 + 1);
    params.add(key);
    for (final Map.Entry<byte[], byte[]> entry : hash.entrySet()) {
        params.add(entry.getKey());
        params.add(entry.getValue());
    }
    sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
// HMGET key field [field ...] — fetches multiple hash fields in one command.
public void hmget(final byte[] key, final byte[]... fields) {
    final byte[][] args = new byte[fields.length + 1][];
    args[0] = key;
    System.arraycopy(fields, 0, args, 1, fields.length);
    sendCommand(HMGET, args);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[] field) {
sendCommand(HDEL, key, field);
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[] string) {
sendCommand(RPUSH, key, string);
}
public void lpush(final byte[] key, final byte[] string) {
sendCommand(LPUSH, key, string);
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[] member) {
sendCommand(SADD, key, member);
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[] member) {
sendCommand(SREM, key, member);
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
public void zrange(final byte[] key, final int start, final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[] member) {
sendCommand(ZREM, key, member);
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final int start, final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
// ZCOUNT with numeric bounds; infinite bounds are encoded as the Redis
// sentinel strings "-inf"/"+inf".
public void zcount(final byte[] key, final double min, final double max) {
// NOTE(review): getBytes() uses the platform default charset. The sentinels
// are pure ASCII so this is safe in practice, but an explicit charset (or
// SafeEncoder) would be more robust — confirm against project convention.
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
}
// ZCOUNT with raw byte-array bounds, passed through unmodified.
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
// ZCOUNT with string bounds (e.g. "(1.5", "+inf"); encoded with the platform
// default charset — see the note above in this block.
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final int start, final int end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final double start,
final double end) {
sendCommand(ZREMRANGEBYSCORE, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte start[],
final byte end[]) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start,
final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
// ZUNIONSTORE dstkey numkeys set [set ...] plus any extra ZParams options
// (e.g. WEIGHTS / AGGREGATE), appended after the source sets.
public void zunionstore(final byte[] dstkey, final ZParams params,
        final byte[]... sets) {
    final List<byte[]> call = new ArrayList<byte[]>();
    call.add(dstkey);
    call.add(Protocol.toByteArray(sets.length));
    for (int i = 0; i < sets.length; i++) {
        call.add(sets[i]);
    }
    call.addAll(params.getParams());
    sendCommand(ZUNIONSTORE, call.toArray(new byte[call.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final String pattern) {
sendCommand(CONFIG, Keyword.GET.name(), pattern);
}
public void configSet(final String parameter, final String value) {
sendCommand(CONFIG, Keyword.SET.name(), parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[] string) {
sendCommand(LPUSHX, key, string);
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[] string) {
sendCommand(RPUSHX, key, string);
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
}
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
public void setPassword(final String password) {
this.password = password;
}
/**
 * Opens the underlying connection if not already connected, then replays
 * session state so reconnects are transparent to callers: authenticates
 * when a password has been set, and re-issues SELECT when a non-default
 * database was previously chosen.
 */
@Override
public void connect() {
    if (!isConnected()) {
        super.connect();
        if (password != null) {
            auth(password);
            getStatusCodeReply();
        }
        if (db > 0) {
            // db holds a server database index; the narrowing cast avoids
            // the original Long.valueOf(db).intValue() box/unbox round trip.
            select((int) db);
            getStatusCodeReply();
        }
    }
}
public void ping() {
sendCommand(PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
/**
 * Issues HMSET, setting multiple hash fields in a single round trip.
 * Iterates entrySet() so each mapping is read once, instead of the
 * original keySet() walk followed by a get() per field; the argument
 * list is presized to key + (field, value) per entry.
 *
 * @param key  hash key
 * @param hash field-to-value mappings to store
 */
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    final List<byte[]> params = new ArrayList<byte[]>(hash.size() * 2 + 1);
    params.add(key);
    for (final Map.Entry<byte[], byte[]> entry : hash.entrySet()) {
        params.add(entry.getKey());
        params.add(entry.getValue());
    }
    sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[] field) {
sendCommand(HDEL, key, field);
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[] string) {
sendCommand(RPUSH, key, string);
}
public void lpush(final byte[] key, final byte[] string) {
sendCommand(LPUSH, key, string);
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[] member) {
sendCommand(SADD, key, member);
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[] member) {
sendCommand(SREM, key, member);
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
/**
 * Issues SINTERSTORE: intersects the given sets and stores the result
 * at dstkey. Wire layout: destination key first, then every source key.
 */
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
    final byte[][] args = new byte[keys.length + 1][];
    args[0] = dstkey;
    for (int i = 0; i < keys.length; i++) {
        args[i + 1] = keys[i];
    }
    sendCommand(SINTERSTORE, args);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
public void zrange(final byte[] key, final int start, final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[] member) {
sendCommand(ZREM, key, member);
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final int start, final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final int start,
final int end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
/**
 * Begins a MULTI/EXEC transaction; subsequent commands are queued
 * server-side until EXEC or DISCARD.
 */
public void multi() {
sendCommand(MULTI);
// Flag is flipped only after the command is written, so a send failure
// leaves the client's view consistent with the server.
isInMulti = true;
}
/** Aborts the current transaction and clears the local multi flag. */
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
/** Executes all queued transaction commands and clears the local multi flag. */
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
/**
 * Issues ZCOUNT, counting sorted-set members whose score lies in the
 * inclusive [min, max] interval. Infinite bounds are translated to the
 * "-inf"/"+inf" tokens Redis expects; the original only mapped -inf for
 * min and +inf for max, so the opposite infinity on either bound was
 * serialized via toByteArray and rejected by the server.
 */
public void zcount(final byte[] key, final double min, final double max) {
    sendCommand(ZCOUNT, key, scoreToByteArray(min), scoreToByteArray(max));
}

/** Encodes a score bound, mapping either infinity to its Redis token. */
private static byte[] scoreToByteArray(final double score) {
    if (score == Double.NEGATIVE_INFINITY) {
        return "-inf".getBytes();
    }
    if (score == Double.POSITIVE_INFINITY) {
        return "+inf".getBytes();
    }
    return toByteArray(score);
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
/**
 * Issues ZRANGEBYSCORE over an inclusive score interval.
 * A NEGATIVE_INFINITY min / POSITIVE_INFINITY max is sent as the
 * "-inf" / "+inf" token understood by the server.
 */
public void zrangeByScore(final byte[] key, final double min,
        final double max) {
    final byte[] minArg = (min == Double.NEGATIVE_INFINITY)
            ? "-inf".getBytes() : toByteArray(min);
    final byte[] maxArg = (max == Double.POSITIVE_INFINITY)
            ? "+inf".getBytes() : toByteArray(max);
    sendCommand(ZRANGEBYSCORE, key, minArg, maxArg);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final int start, final int end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final double start,
final double end) {
sendCommand(ZREMRANGEBYSCORE, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte start[],
final byte end[]) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start,
final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
/**
 * Issues ZUNIONSTORE: stores at dstkey the union of the given sorted
 * sets. Wire layout: destination key, count of source sets, then the
 * source keys themselves.
 */
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
    final byte[][] args = new byte[sets.length + 2][];
    args[0] = dstkey;
    args[1] = toByteArray(sets.length);
    for (int i = 0; i < sets.length; i++) {
        args[i + 2] = sets[i];
    }
    sendCommand(ZUNIONSTORE, args);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final String pattern) {
sendCommand(CONFIG, Keyword.GET.name(), pattern);
}
public void configSet(final String parameter, final String value) {
sendCommand(CONFIG, Keyword.SET.name(), parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[] string) {
sendCommand(LPUSHX, key, string);
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[] string) {
sendCommand(RPUSHX, key, string);
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
}
MergeMethods
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE , AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Prepends a single element to an argument array, producing the
 * key-first parameter layout used by the variadic commands.
 * Uses System.arraycopy instead of the original element-by-element
 * loop, matching how every other method in this class builds its
 * parameter arrays.
 *
 * @param first element placed at index 0 (typically the key)
 * @param rest  remaining arguments, copied in order after {@code first}
 * @return a new array of length {@code rest.length + 1}
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] result = new byte[rest.length + 1][];
    result[0] = first;
    System.arraycopy(rest, 0, result, 1, rest.length);
    return result;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
/**
 * Issues HMSET, setting multiple hash fields in a single round trip.
 *
 * @param key  hash key
 * @param hash field-to-value mappings to store
 */
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
// entrySet() reads each mapping once, avoiding a get() per key.
for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
params.add(entry.getKey());
params.add(entry.getValue());
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
/**
 * Issues a multi-member ZADD in one round trip. The argument list is
 * presized to hold the key plus a (score, member) pair per map entry.
 *
 * NOTE(review): the map is keyed by score, so two members sharing the
 * same score cannot both be passed through this signature — looks like
 * an API limitation rather than intentional; verify against callers.
 */
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
// Wire order per pair: score first, then member.
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
/**
 * BLPOP with timeout: BLPOP key [key ...] timeout — the timeout is
 * appended after the keys, per the Redis command syntax.
 */
public void blpop(final int timeout, final byte[]... keys) {
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    blpop(args);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
/**
 * BRPOP with timeout: BRPOP key [key ...] timeout — the timeout is
 * appended after the keys, per the Redis command syntax.
 */
public void brpop(final int timeout, final byte[]... keys) {
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    brpop(args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
public void zrange(final byte[] key, final long start, final long end) {
<<<<<<< MINE
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
>>>>>>> YOURS
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
<<<<<<< MINE
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
>>>>>>> YOURS
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zremrangeByScore(final byte[] key, final byte[] start,
final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
<<<<<<< MINE
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
<<<<<<< MINE
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
/**
 * ZRANGEBYSCORE ... WITHSCORES with double bounds; infinite bounds are
 * translated to the "-inf"/"+inf" forms Redis expects.
 */
public void zrangeByScoreWithScores(final byte[] key, final double min,
        final double max) {
    byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
    byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
    sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
            WITHSCORES.raw);
}
/**
 * ZRANGEBYSCORE ... WITHSCORES with String bounds.
 * A duplicate byte[] overload (identical to the one declared earlier in
 * this class, a "keep both methods" merge artifact) was removed — two
 * methods with the same signature do not compile.
 */
public void zrangeByScoreWithScores(final byte[] key, final String min,
        final String max) {
    sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
            WITHSCORES.raw);
}
/**
 * ZREVRANGEBYSCORE ... WITHSCORES with double bounds (max before min);
 * infinite bounds are translated to the "-inf"/"+inf" forms Redis expects.
 */
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
        final double min) {
    byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
    byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
    sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
            WITHSCORES.raw);
}
/**
 * ZREVRANGEBYSCORE ... WITHSCORES with String bounds.
 * A duplicate byte[] overload (identical to the one declared earlier in
 * this class, a "keep both methods" merge artifact) was removed — two
 * methods with the same signature do not compile.
 */
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
        final String min) {
    sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
            WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
<<<<<<< MINE
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
<<<<<<< MINE
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
/**
 * ZREMRANGEBYSCORE with String bounds (e.g. "-inf", "(1.5").
 * The duplicate byte[] overload that preceded this method (identical to
 * the one declared earlier in this class, a "keep both methods" merge
 * artifact) was removed — two methods with the same signature do not
 * compile.
 */
public void zremrangeByScore(final byte[] key, final String start,
        final String end) {
    sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
/**
 * Shared driver for EVAL/EVALSHA:
 * command script|sha keyCount param [param ...].
 */
private void sendEvalCommand(Command command, byte[] script,
        byte[] keyCount, byte[][] params) {
    final byte[][] allArgs = new byte[params.length + 2][];
    allArgs[0] = script;
    allArgs[1] = keyCount;
    System.arraycopy(params, 0, allArgs, 2, params.length);
    sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
/**
 * SCRIPT EXISTS sha1 [sha1 ...] — queries the server script cache.
 */
public void scriptExists(byte[]... sha1) {
    final byte[][] args = new byte[sha1.length + 1][];
    args[0] = Keyword.EXISTS.raw;
    System.arraycopy(sha1, 0, args, 1, sha1.length);
    sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
/**
 * BITOP AND|OR|XOR|NOT destKey srcKey [srcKey ...].
 * NOT is unary, so at most one source key is forwarded for it.
 */
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
    final Keyword operation;
    switch (op) {
    case OR:
        operation = Keyword.OR;
        break;
    case XOR:
        operation = Keyword.XOR;
        break;
    case NOT:
        operation = Keyword.NOT;
        break;
    default:
        operation = Keyword.AND;
        break;
    }
    // NOT accepts exactly one source key; the other ops take all of them.
    final int keyCount = (op == BitOP.NOT)
            ? Math.min(1, srcKeys.length) : srcKeys.length;
    final byte[][] args = new byte[keyCount + 2][];
    args[0] = operation.raw;
    args[1] = destKey;
    System.arraycopy(srcKeys, 0, args, 2, keyCount);
    sendCommand(BITOP, args);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
/**
 * PEXPIRE key milliseconds — sets a TTL in milliseconds.
 * Kept for backward compatibility; delegates to the long overload.
 */
public void pexpire(final byte[] key, final int milliseconds) {
    pexpire(key, (long) milliseconds);
}
/**
 * PEXPIRE with a long TTL. Generalization: Redis accepts millisecond
 * TTLs larger than Integer.MAX_VALUE (~24.8 days), which the int
 * overload could not express.
 */
public void pexpire(final byte[] key, final long milliseconds) {
    sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE , AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Prepends {@code first} to {@code rest}, building the argument vector
 * for variadic key commands (SADD, SREM, HDEL, ...).
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] joined = new byte[rest.length + 1][];
    joined[0] = first;
    System.arraycopy(rest, 0, joined, 1, rest.length);
    return joined;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
params.add(entry.getKey());
params.add(entry.getValue());
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
blpop(args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
brpop(args.toArray(new byte[args.size()][]));
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
public void zrange(final byte[] key, final long start, final long end) {
<<<<<<< MINE
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
>>>>>>> YOURS
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
<<<<<<< MINE
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
>>>>>>> YOURS
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zremrangeByScore(final byte[] key, final byte[] start,
final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
<<<<<<< MINE
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
<<<<<<< MINE
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
<<<<<<< MINE
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
<<<<<<< MINE
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zremrangeByScore(final byte[] key, final byte start[],
final byte end[]) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start,
final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
/**
 * ZUNIONSTORE without weights/aggregate: destination key, the number of
 * source sets, then the source keys themselves.
 */
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
    final List<byte[]> args = new ArrayList<byte[]>();
    args.add(dstkey);
    args.add(toByteArray(sets.length));
    for (final byte[] set : sets) {
        args.add(set);
    }
    sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
// ZUNIONSTORE with extra options (weights/aggregate carried by ZParams):
// wire layout is dstkey, set count, the set keys, then the ZParams tokens.
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
// Redis requires the number of input keys before the keys themselves.
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
// ZParams serializes its options (e.g. WEIGHTS/AGGREGATE) as raw tokens.
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
/**
 * ZINTERSTORE without weights/aggregate: destination key, the number of
 * source sets, then the source keys themselves.
 */
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
    final List<byte[]> args = new ArrayList<byte[]>();
    args.add(dstkey);
    args.add(Protocol.toByteArray(sets.length));
    for (final byte[] set : sets) {
        args.add(set);
    }
    sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
// ZINTERSTORE with extra options (weights/aggregate carried by ZParams):
// wire layout is dstkey, set count, the set keys, then the ZParams tokens.
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
// Redis requires the number of input keys before the keys themselves.
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
// ZParams serializes its options (e.g. WEIGHTS/AGGREGATE) as raw tokens.
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
/**
 * Shared EVAL/EVALSHA dispatcher: the argument vector is the script (or
 * its SHA1), the key count, then the key/argument parameters.
 */
private void sendEvalCommand(Command command, byte[] script,
        byte[] keyCount, byte[][] params) {
    final byte[][] allArgs = new byte[params.length + 2][];
    allArgs[0] = script;
    allArgs[1] = keyCount;
    System.arraycopy(params, 0, allArgs, 2, params.length);
    sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
public void scriptExists(byte[]... sha1) {
byte[][] args = new byte[sha1.length + 1][];
args[0] = Keyword.EXISTS.raw;
for (int i = 0; i < sha1.length; i++)
args[i + 1] = sha1[i];
sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
// BITOP: bitwise operation across source keys, stored into destKey.
// Wire layout: operation keyword, destination key, then the source keys.
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
Keyword kw = Keyword.AND;
int len = srcKeys.length;
// Map the public BitOP enum onto the protocol Keyword tokens.
switch (op) {
case AND:
kw = Keyword.AND;
break;
case OR:
kw = Keyword.OR;
break;
case XOR:
kw = Keyword.XOR;
break;
case NOT:
kw = Keyword.NOT;
// BITOP NOT accepts exactly one source key, so extra keys are dropped.
len = Math.min(1, len);
break;
}
byte[][] bargs = new byte[len + 2][];
bargs[0] = kw.raw;
bargs[1] = destKey;
for (int i = 0; i < len; ++i) {
bargs[i + 2] = srcKeys[i];
}
sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
KeepBothMethods
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE(), AFTER();
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Prepends {@code first} to {@code rest}, producing the single argument
 * array expected by the command senders.
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] joined = new byte[rest.length + 1][];
    joined[0] = first;
    System.arraycopy(rest, 0, joined, 1, rest.length);
    return joined;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
// Connect lazily and replay session state: authenticate if a password was
// recorded, then re-select the non-default database index if one is set.
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
// Consume the AUTH status reply so the stream stays in sync.
getStatusCodeReply();
}
if (db > 0) {
// Restore the previously selected database after reconnecting.
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
params.add(entry.getKey());
params.add(entry.getValue());
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey, final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// Bulk ZADD: sends key followed by alternating score/member pairs.
// NOTE(review): keying the map by Double means two members cannot share a
// score in one call — presumably a known API limitation; confirm callers.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
ArrayList<byte[]> args = new ArrayList<byte[]>(scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
// Protocol order is score then member for each pair.
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score, final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end), WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end), WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
blpop(args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final SortingParams sortingParameters, final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
brpop(args.toArray(new byte[args.size()][]));
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
public void zrangeByScore(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min, final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max, final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max, final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScore(final byte[] key, final byte[] min, final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte[] max, final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max, final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min, final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max, final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final long start, final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte[] start, final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
// ZREVRANGEBYSCORE key max min — String bounds allow Redis range syntax
// such as "(1.5", "+inf", "-inf".
// NOTE(review): String.getBytes() uses the platform default charset here,
// unlike the SafeEncoder-based encoding used for keyword constants in this
// file — confirm this is intentional (behavior differs on non-UTF-8 JVMs).
public void zrevrangeByScore(final byte[] key, final String max, final String min) {
    sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final double min, final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final byte min[], final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min, final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final double max, final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte max[], final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max, final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min, final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max, final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[], final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max, final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(), WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final double min, final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[], final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min, final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max, final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[], final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max, final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zremrangeByScore(final byte[] key, final byte start[], final byte end[]) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start, final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
/**
 * ZUNIONSTORE dstkey numkeys key [key ...] — stores the union of the given
 * sorted sets into {@code dstkey}.
 */
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
    final List<byte[]> args = new ArrayList<byte[]>(sets.length + 2);
    args.add(dstkey);
    args.add(toByteArray(sets.length));
    for (final byte[] set : sets) {
        args.add(set);
    }
    sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
/**
 * ZUNIONSTORE with aggregation options: dstkey, numkeys, the source keys,
 * then whatever WEIGHTS/AGGREGATE arguments {@code params} carries.
 */
public void zunionstore(final byte[] dstkey, final ZParams params, final byte[]... sets) {
    final List<byte[]> arguments = new ArrayList<byte[]>();
    arguments.add(dstkey);
    arguments.add(Protocol.toByteArray(sets.length));
    for (int i = 0; i < sets.length; i++) {
        arguments.add(sets[i]);
    }
    arguments.addAll(params.getParams());
    sendCommand(ZUNIONSTORE, arguments.toArray(new byte[arguments.size()][]));
}
/**
 * ZINTERSTORE dstkey numkeys key [key ...] — stores the intersection of the
 * given sorted sets into {@code dstkey}.
 */
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
    final List<byte[]> args = new ArrayList<byte[]>(sets.length + 2);
    args.add(dstkey);
    args.add(Protocol.toByteArray(sets.length));
    for (final byte[] set : sets) {
        args.add(set);
    }
    sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
/**
 * ZINTERSTORE with aggregation options: dstkey, numkeys, the source keys,
 * then the WEIGHTS/AGGREGATE arguments carried by {@code params}.
 */
public void zinterstore(final byte[] dstkey, final ZParams params, final byte[]... sets) {
    final List<byte[]> arguments = new ArrayList<byte[]>();
    arguments.add(dstkey);
    arguments.add(Protocol.toByteArray(sets.length));
    for (int i = 0; i < sets.length; i++) {
        arguments.add(sets[i]);
    }
    arguments.addAll(params.getParams());
    sendCommand(ZINTERSTORE, arguments.toArray(new byte[arguments.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where, final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination, final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset), toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
/**
 * Shared dispatcher for EVAL/EVALSHA. The wire layout is
 * [script-or-sha1, numkeys, key/arg ...].
 *
 * @param command  EVAL or EVALSHA
 * @param script   the Lua script body, or its SHA1 digest for EVALSHA
 * @param keyCount the number of keys, already encoded
 * @param params   keys followed by additional script arguments
 */
private void sendEvalCommand(Command command, byte[] script, byte[] keyCount, byte[][] params) {
    final byte[][] allArgs = new byte[params.length + 2][];
    allArgs[0] = script;
    allArgs[1] = keyCount;
    // System.arraycopy matches the array-building idiom used elsewhere in
    // this class (hmget, sinterstore, ...) and replaces the manual index loop.
    System.arraycopy(params, 0, allArgs, 2, params.length);
    sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
/**
 * SCRIPT EXISTS sha1 [sha1 ...] — asks the server which of the given script
 * digests are present in its script cache.
 */
public void scriptExists(byte[]... sha1) {
    byte[][] args = new byte[sha1.length + 1][];
    args[0] = Keyword.EXISTS.raw;
    // arraycopy for consistency with the other argument-array builders here.
    System.arraycopy(sha1, 0, args, 1, sha1.length);
    sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
/**
 * BITOP operation destkey srckey [srckey ...] — bitwise operation across
 * string values, result written to {@code destKey}.
 */
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
    final Keyword keyword;
    int sourceCount = srcKeys.length;
    switch (op) {
    case OR:
        keyword = Keyword.OR;
        break;
    case XOR:
        keyword = Keyword.XOR;
        break;
    case NOT:
        keyword = Keyword.NOT;
        // NOT is unary: send at most one source key.
        sourceCount = Math.min(1, sourceCount);
        break;
    case AND:
    default:
        keyword = Keyword.AND;
        break;
    }
    final byte[][] args = new byte[sourceCount + 2][];
    args[0] = keyword.raw;
    args[1] = destKey;
    System.arraycopy(srcKeys, 0, args, 2, sourceCount);
    sendCommand(BITOP, args);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
// PEXPIRE key milliseconds — set a TTL with millisecond resolution.
// NOTE(review): the `int` parameter caps the TTL at Integer.MAX_VALUE ms
// (~24.8 days) even though the server accepts a 64-bit value — confirm
// whether a long overload is needed by callers.
public void pexpire(final byte[] key, final int milliseconds) {
    sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE(), AFTER();
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Prepends {@code first} to {@code rest}, producing the flat argument array
 * that sendCommand expects (typically a key followed by its members/values).
 *
 * @param first element placed at index 0 of the result
 * @param rest  remaining elements, copied in order after {@code first}
 * @return a new array of length {@code rest.length + 1}
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    byte[][] result = new byte[rest.length + 1][];
    result[0] = first;
    // arraycopy for consistency with hmget/sinterstore et al. in this class.
    System.arraycopy(rest, 0, result, 1, rest.length);
    return result;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
public void connect() {
    // Connect only once; on a fresh connection, replay session state that the
    // server does not persist across connections: AUTH first, then SELECT.
    if (!isConnected()) {
        super.connect();
        if (password != null) {
            auth(password);
            // consume the server's +OK before sending anything else
            getStatusCodeReply();
        }
        // `db` is remembered by select()/quit(); re-selecting here makes a
        // reconnect land on the database the caller last chose.
        if (db > 0) {
            select(Long.valueOf(db).intValue());
            getStatusCodeReply();
        }
    }
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
/**
 * HMSET key field value [field value ...] — sets all fields of {@code hash}
 * on the hash stored at {@code key}.
 */
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    final List<byte[]> args = new ArrayList<byte[]>(hash.size() * 2 + 1);
    args.add(key);
    for (final Entry<byte[], byte[]> field : hash.entrySet()) {
        args.add(field.getKey());
        args.add(field.getValue());
    }
    sendCommand(HMSET, args.toArray(new byte[args.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey, final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// ZADD key score member [score member ...] from the given map.
// NOTE(review): keying the map by score means two members that share a score
// cannot both be passed in one call (the map collapses them) — confirm
// callers never need duplicate scores, or invert the map to member->score.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
    // presized: one slot for the key plus a score/member pair per entry
    ArrayList<byte[]> args = new ArrayList<byte[]>(scoreMembers.size() * 2 + 1);
    args.add(key);
    for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
        args.add(toByteArray(entry.getKey()));
        args.add(entry.getValue());
    }
    byte[][] argsArray = new byte[args.size()][];
    args.toArray(argsArray);
    sendCommand(ZADD, argsArray);
}
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score, final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end), WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end), WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
/**
 * BLPOP key [key ...] timeout — blocking left pop. Redis expects the timeout
 * as the final argument, after all keys.
 */
public void blpop(final int timeout, final byte[]... keys) {
    // Build the argument array directly instead of going through an
    // intermediate List, matching the array-building idiom used elsewhere.
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    blpop(args);
}
public void sort(final byte[] key, final SortingParams sortingParameters, final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
/**
 * BRPOP key [key ...] timeout — blocking right pop. Redis expects the
 * timeout as the final argument, after all keys.
 */
public void brpop(final int timeout, final byte[]... keys) {
    // Direct array build instead of an intermediate List, matching the
    // array-building idiom used elsewhere in this class.
    final byte[][] args = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, args, 0, keys.length);
    args[keys.length] = Protocol.toByteArray(timeout);
    brpop(args);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
// ZCOUNT key min max with numeric bounds. Java infinities have no finite
// byte encoding, so they are translated to the Redis "-inf"/"+inf" tokens.
public void zcount(final byte[] key, final double min, final double max) {
    byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
    byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
    sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
public void zrangeByScore(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min, final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max, final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max, final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScore(final byte[] key, final byte[] min, final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte[] max, final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max, final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min, final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max, final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final long start, final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte[] start, final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zrevrangeByScore(final byte[] key, final String max, final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final double min, final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final byte min[], final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min, final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final double max, final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte max[], final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max, final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count));
}
// Same score-range queries as above but appending WITHSCORES so the reply
// carries member/score pairs; no LIMIT clause in this group.
public void zrangeByScoreWithScores(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min, final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max, final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[], final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max, final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(), WITHSCORES.raw);
}
// WITHSCORES variants that also take LIMIT offset/count. Wire order is
// LIMIT offset count WITHSCORES.
public void zrangeByScoreWithScores(final byte[] key, final double min, final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[], final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min, final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final double max, final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[], final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max, final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(), LIMIT.raw, toByteArray(offset), toByteArray(count), WITHSCORES.raw);
}
// ZREMRANGEBYSCORE: remove all members whose score lies in [start, end].
public void zremrangeByScore(final byte[] key, final byte start[], final byte end[]) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start, final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
/**
 * ZUNIONSTORE dstkey numkeys set [set ...] — stores the union of the given
 * sorted sets into dstkey. Argument layout: [dstkey, count, sets...].
 */
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
    final byte[][] params = new byte[sets.length + 2][];
    params[0] = dstkey;
    params[1] = toByteArray(sets.length);
    System.arraycopy(sets, 0, params, 2, sets.length);
    sendCommand(ZUNIONSTORE, params);
}

/** ZUNIONSTORE with WEIGHTS/AGGREGATE options carried by {@code params}. */
public void zunionstore(final byte[] dstkey, final ZParams params, final byte[]... sets) {
    final List<byte[]> args = new ArrayList<byte[]>();
    args.add(dstkey);
    // Consistency fix: use the statically imported toByteArray like the rest
    // of this file (was a mix of toByteArray and Protocol.toByteArray).
    args.add(toByteArray(sets.length));
    for (final byte[] set : sets) {
        args.add(set);
    }
    args.addAll(params.getParams());
    sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}

/** ZINTERSTORE dstkey numkeys set [set ...] — intersection counterpart. */
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
    final byte[][] params = new byte[sets.length + 2][];
    params[0] = dstkey;
    params[1] = toByteArray(sets.length);
    System.arraycopy(sets, 0, params, 2, sets.length);
    sendCommand(ZINTERSTORE, params);
}

/** ZINTERSTORE with WEIGHTS/AGGREGATE options carried by {@code params}. */
public void zinterstore(final byte[] dstkey, final ZParams params, final byte[]... sets) {
    final List<byte[]> args = new ArrayList<byte[]>();
    args.add(dstkey);
    args.add(toByteArray(sets.length));
    for (final byte[] set : sets) {
        args.add(set);
    }
    args.addAll(params.getParams());
    sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
// Server administration commands — each is a thin one-shot writer of the
// corresponding Redis command; replies are read elsewhere by the caller.
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
// SLAVEOF NO ONE — promote this server to master.
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
// Conditional pushes, key persistence, echo, list insertion and debug.
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
// LINSERT key BEFORE|AFTER pivot value — position comes from LIST_POSITION.raw.
public void linsert(final byte[] key, final LIST_POSITION where, final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
// Blocking pop-push, config reset, and bit/byte-range string commands.
public void brpoplpush(final byte[] source, final byte[] destination, final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset), toByteArray(endOffset));
}
// Currently selected database index (tracked locally by select()).
public Long getDB() {
return db;
}
// Reset the tracked db index before closing so a future connect() does not
// re-select a stale database.
public void disconnect() {
db = 0;
super.disconnect();
}
/**
 * Shared driver for EVAL/EVALSHA: argument layout is
 * [script-or-sha1, keyCount, params...].
 */
private void sendEvalCommand(Command command, byte[] script, byte[] keyCount, byte[][] params) {
    final byte[][] allArgs = new byte[params.length + 2][];
    allArgs[0] = script;
    allArgs[1] = keyCount;
    // Idiom/consistency: bulk-copy like the other argument-building methods
    // in this class instead of an index loop.
    System.arraycopy(params, 0, allArgs, 2, params.length);
    sendCommand(command, allArgs);
}

public void eval(byte[] script, byte[] keyCount, byte[][] params) {
    sendEvalCommand(EVAL, script, keyCount, params);
}

public void eval(byte[] script, int keyCount, byte[]... params) {
    eval(script, toByteArray(keyCount), params);
}

public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
    sendEvalCommand(EVALSHA, sha1, keyCount, params);
}

public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
    // Consistency: delegate to the byte[]-keyCount overload, mirroring eval().
    evalsha(sha1, toByteArray(keyCount), params);
}
// SCRIPT, SLOWLOG and OBJECT sub-commands.
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
// SCRIPT EXISTS sha1 [sha1 ...] — args are [EXISTS, hashes...].
public void scriptExists(byte[]... sha1) {
byte[][] args = new byte[sha1.length + 1][];
args[0] = Keyword.EXISTS.raw;
for (int i = 0; i < sha1.length; i++) args[i + 1] = sha1[i];
sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
// BITCOUNT over the whole value, or over the [start, end] byte range.
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
/**
 * BITOP op destKey srcKey [srcKey ...]. The public BitOP enum is mapped to
 * the protocol Keyword; for NOT, Redis accepts a single source key, so at
 * most the first source key is sent.
 */
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
    Keyword kw;
    switch (op) {
    case OR:
        kw = Keyword.OR;
        break;
    case XOR:
        kw = Keyword.XOR;
        break;
    case NOT:
        kw = Keyword.NOT;
        break;
    default:
        kw = Keyword.AND;
        break;
    }
    final int len = (op == BitOP.NOT) ? Math.min(1, srcKeys.length) : srcKeys.length;
    final byte[][] bargs = new byte[len + 2][];
    bargs[0] = kw.raw;
    bargs[1] = destKey;
    System.arraycopy(srcKeys, 0, bargs, 2, len);
    sendCommand(BITOP, bargs);
}
// Sentinel, dump/restore, millisecond-precision expiry, float increments,
// SET-with-options, and CLIENT sub-commands.
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
// SET key value [NX|XX] and SET key value [NX|XX] [EX|PX] time.
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
Safe
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
// Relative insertion position for LINSERT; raw caches the encoded enum name
// so it can be written to the wire without re-encoding per call.
public enum LIST_POSITION {
BEFORE , AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}}
// Connection-local state: whether a MULTI transaction is open, the password
// used to (re-)authenticate in connect(), and the selected database index.
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
// Constructors delegate straight to Connection; no command is sent until
// connect() or the first command triggers it.
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Prepends {@code first} to {@code rest}, producing the argument vector for
 * variadic commands (key followed by values/members).
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] result = new byte[rest.length + 1][];
    result[0] = first;
    // Idiom/consistency: the rest of this class (sinterstore, sunionstore,
    // sdiffstore, hmget) uses System.arraycopy for the same shift-by-one copy.
    System.arraycopy(rest, 0, result, 1, rest.length);
    return result;
}
// Remember the password so connect() can (re-)authenticate automatically.
public void setPassword(final String password) {
this.password = password;
}
// On (re)connect: authenticate if a password is set, then re-select the
// previously selected database so client state survives reconnection.
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
getStatusCodeReply();
}
if (db > 0) {
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
// Basic connection, key-space and string commands. Each method only writes
// the command; the reply is consumed by the caller through Connection.
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
// Reset the tracked db index so a reused client starts from database 0.
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
// Track the selected index locally so connect() can restore it.
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
// Hash commands. hmset/hmget flatten their inputs into a single argument
// vector of [key, field, value, ...] / [key, fields...].
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
params.add(entry.getKey());
params.add(entry.getValue());
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
// List and set commands; variadic ones prepend the key via joinParameters.
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
// Set algebra commands; the *store variants build [dstkey, keys...] vectors.
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// Multi-member ZADD. NOTE(review): the map is keyed by score, so two members
// sharing the same score cannot both be passed in one call — verify against
// callers whether that restriction is intended.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
// Index-based sorted-set ranges, cardinality and score lookup.
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
// Transaction commands: isInMulti tracks whether a MULTI block is open so
// higher layers can route replies appropriately.
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
// SORT variants and blocking pops. The timeout-taking blpop/brpop append the
// timeout AFTER the keys, per the Redis wire format.
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
blpop(args.toArray(new byte[args.size()][]));
}
// SORT ... STORE dstkey.
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
brpop(args.toArray(new byte[args.size()][]));
}
// AUTH stores the password first so connect() re-authenticates after a
// reconnect; then the pub/sub command family.
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
<<<<<<< MINE
public void zcount(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
=======
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
>>>>>>> YOURS
}
// ZCOUNT with raw/String range tokens.
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
<<<<<<< MINE
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
=======
>>>>>>> YOURS
// Score-range queries without LIMIT, plus rank/score removal helpers.
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte[] start,
final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
<<<<<<< MINE
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
=======
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
// ZRANGEBYSCORE with LIMIT for raw and String range tokens.
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
<<<<<<< MINE
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
=======
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
// ZREVRANGEBYSCORE with LIMIT for raw and String range tokens.
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
=======
>>>>>>> YOURS
// ZRANGEBYSCORE WITHSCORES for raw and String range tokens.
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
=======
>>>>>>> YOURS
// ZREVRANGEBYSCORE WITHSCORES for raw and String range tokens.
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
=======
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
>>>>>>> YOURS
}
// ZRANGEBYSCORE WITHSCORES + LIMIT for raw and String range tokens.
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
=======
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zremrangeByScore(final byte[] key, final byte start[],
final byte end[]) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start,
final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
// Dispatches an EVAL-family command. The wire argument vector is:
// script-or-sha, key count, then the caller-supplied parameters, in order.
private void sendEvalCommand(Command command, byte[] script,
        byte[] keyCount, byte[][] params) {
    final byte[][] args = new byte[params.length + 2][];
    args[0] = script;
    args[1] = keyCount;
    System.arraycopy(params, 0, args, 2, params.length);
    sendCommand(command, args);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
public void scriptExists(byte[]... sha1) {
byte[][] args = new byte[sha1.length + 1][];
args[0] = Keyword.EXISTS.raw;
for (int i = 0; i < sha1.length; i++)
args[i + 1] = sha1[i];
sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
// Sends BITOP <AND|OR|XOR|NOT> destKey srcKey [srcKey ...].
// Maps the public BitOP enum onto the protocol Keyword and forwards the
// source keys as trailing arguments.
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
// Default is never used in practice: the switch below covers every BitOP
// value, but the compiler requires kw to be definitely assigned.
Keyword kw = Keyword.AND;
int len = srcKeys.length;
switch (op) {
case AND:
kw = Keyword.AND;
break;
case OR:
kw = Keyword.OR;
break;
case XOR:
kw = Keyword.XOR;
break;
case NOT:
kw = Keyword.NOT;
// BITOP NOT accepts exactly one source key, so any extra keys the
// caller passed are silently ignored here.
len = Math.min(1, len);
break;
}
// Argument vector: operation keyword, destination key, then len source keys.
byte[][] bargs = new byte[len + 2][];
bargs[0] = kw.raw;
bargs[1] = destKey;
for (int i = 0; i < len; ++i) {
bargs[i + 2] = srcKeys[i];
}
sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE , AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
// Prepends a single argument (typically the key) to a varargs array,
// producing the flat argument vector sendCommand expects.
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] joined = new byte[rest.length + 1][];
    joined[0] = first;
    System.arraycopy(rest, 0, joined, 1, rest.length);
    return joined;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
// Opens the socket (no-op if already connected) and then restores session
// state: re-authenticates if a password was set, and re-selects the last
// database if it was anything other than 0.
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
// Consume the +OK reply so the stream stays in sync.
getStatusCodeReply();
}
if (db > 0) {
// db is remembered by select(); replay it so a reconnect lands the
// session on the same database as before.
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
// Sends HMSET key field value [field value ...] for every entry in the map.
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    // Exact size is known up front: one slot for the key plus two per entry.
    final byte[][] args = new byte[hash.size() * 2 + 1][];
    int i = 0;
    args[i++] = key;
    for (final Entry<byte[], byte[]> field : hash.entrySet()) {
        args[i++] = field.getKey();
        args[i++] = field.getValue();
    }
    sendCommand(HMSET, args);
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// Sends ZADD key score member [score member ...] for every map entry.
// NOTE(review): the map is keyed by score, so two members that share the
// same score cannot both be passed through this signature — confirm callers
// never need that; fixing it would require a Map<byte[], Double> instead.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
blpop(args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
brpop(args.toArray(new byte[args.size()][]));
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
}
<<<<<<< MINE
public void zcount(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
=======
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
>>>>>>> YOURS
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
<<<<<<< MINE
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
}
=======
>>>>>>> YOURS
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
public void zremrangeByScore(final byte[] key, final byte[] start,
final byte[] end) {
sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
<<<<<<< MINE
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
=======
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
<<<<<<< MINE
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
=======
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
=======
>>>>>>> YOURS
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
=======
>>>>>>> YOURS
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
=======
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
=======
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
>>>>>>> YOURS
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
// The byte[]-based zremrangeByScore overload that stood here duplicated the
// identical overload declared earlier in this class and was removed to avoid
// a duplicate-method compile error. Only the String-based variant remains.
//
// Sends ZREMRANGEBYSCORE key start end with pre-formatted String bounds.
public void zremrangeByScore(final byte[] key, final String start,
        final String end) {
    sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
/**
 * Builds the argument vector for EVAL/EVALSHA — script (or sha1), key
 * count, then the keys/args — and dispatches it as a single command.
 */
private void sendEvalCommand(Command command, byte[] script,
        byte[] keyCount, byte[][] params) {
    final byte[][] args = new byte[params.length + 2][];
    args[0] = script;
    args[1] = keyCount;
    // Bulk-copy the remaining parameters after the two fixed slots.
    System.arraycopy(params, 0, args, 2, params.length);
    sendCommand(command, args);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
public void scriptExists(byte[]... sha1) {
byte[][] args = new byte[sha1.length + 1][];
args[0] = Keyword.EXISTS.raw;
for (int i = 0; i < sha1.length; i++)
args[i + 1] = sha1[i];
sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
// Sends BITOP <AND|OR|XOR|NOT> destKey srcKey... .
// For NOT the source-key count is clamped to at most one via
// Math.min(1, len), since Redis BITOP NOT takes exactly one source key.
// NOTE(review): with an empty srcKeys and NOT, len stays 0 and no source
// key is sent — presumably the server rejects that; confirm callers
// always pass at least one key.
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
// Default keyword; always overwritten by the switch below for every
// BitOP value, but initialized so the compiler sees it assigned.
Keyword kw = Keyword.AND;
int len = srcKeys.length;
switch (op) {
case AND:
kw = Keyword.AND;
break;
case OR:
kw = Keyword.OR;
break;
case XOR:
kw = Keyword.XOR;
break;
case NOT:
kw = Keyword.NOT;
// BITOP NOT accepts a single source key; drop any extras.
len = Math.min(1, len);
break;
}
// Layout: [operation keyword, destination key, source keys...].
byte[][] bargs = new byte[len + 2][];
bargs[0] = kw.raw;
bargs[1] = destKey;
for (int i = 0; i < len; ++i) {
bargs[i + 2] = srcKeys[i];
}
sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
Unstructured merge result:
package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Returns a new array consisting of {@code first} followed by every
 * element of {@code rest}, in order. Used to prepend a key to a varargs
 * parameter list before sending a command.
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] joined = new byte[rest.length + 1][];
    joined[0] = first;
    System.arraycopy(rest, 0, joined, 1, rest.length);
    return joined;
}
public void setPassword(final String password) {
this.password = password;
}
// Connects the underlying socket, then replays session state that does not
// survive a reconnect: AUTH (when a password was set) and SELECT (when a
// non-default db index was chosen earlier via select()).
@Override
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
auth(password);
// Consume the +OK reply so the stream stays in sync.
getStatusCodeReply();
}
if (db > 0) {
// db is tracked locally by select(); restore it on the new connection.
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
final List<byte[]> params = new ArrayList<byte[]>();
params.add(key);
for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
params.add(entry.getKey());
params.add(entry.getValue());
}
sendCommand(HMSET, params.toArray(new byte[params.size()][]));
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
// Sends a multi-member ZADD: key followed by (score, member) pairs.
// NOTE(review): the parameter is keyed by score (Map<Double, byte[]>), so
// it can hold at most one member per distinct score — members sharing a
// score cannot be added in one call through this overload; confirm that
// is the intended contract for callers.
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
// Presize: one slot for the key plus two per entry (score, member).
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
public void blpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
blpop(args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
public void brpop(final int timeout, final byte[]... keys) {
final List<byte[]> args = new ArrayList<byte[]>();
for (final byte[] arg : keys) {
args.add(arg);
}
args.add(Protocol.toByteArray(timeout));
brpop(args.toArray(new byte[args.size()][]));
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
<<<<<<< MINE
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
=======
public void zcount(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
>>>>>>> YOURS
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
<<<<<<< MINE
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
=======
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
=======
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
// Merge artifact fix: this method was declared twice with identical bodies
// ("final byte[] start" vs "final byte start[]" are the same Java
// signature), which does not compile. A single definition is kept.

/** Sends ZREMRANGEBYSCORE key min max with pre-encoded byte[] bounds. */
public void zremrangeByScore(final byte[] key, final byte[] start,
        final byte[] end) {
    sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start,
final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
private void sendEvalCommand(Command command, byte[] script,
byte[] keyCount, byte[][] params) {
final byte[][] allArgs = new byte[params.length + 2][];
allArgs[0] = script;
allArgs[1] = keyCount;
for (int i = 0; i < params.length; i++)
allArgs[i + 2] = params[i];
sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
public void scriptExists(byte[]... sha1) {
byte[][] args = new byte[sha1.length + 1][];
args[0] = Keyword.EXISTS.raw;
for (int i = 0; i < sha1.length; i++)
args[i + 1] = sha1[i];
sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
Keyword kw = Keyword.AND;
int len = srcKeys.length;
switch (op) {
case AND:
kw = Keyword.AND;
break;
case OR:
kw = Keyword.OR;
break;
case XOR:
kw = Keyword.XOR;
break;
case NOT:
kw = Keyword.NOT;
len = Math.min(1, len);
break;
}
byte[][] bargs = new byte[len + 2][];
bargs[0] = kw.raw;
bargs[1] = destKey;
for (int i = 0; i < len; ++i) {
bargs[i + 2] = srcKeys[i];
}
sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}package redis.clients.jedis;
import static redis.clients.jedis.Protocol.toByteArray;
import static redis.clients.jedis.Protocol.Command.*;
import static redis.clients.jedis.Protocol.Keyword.ENCODING;
import static redis.clients.jedis.Protocol.Keyword.IDLETIME;
import static redis.clients.jedis.Protocol.Keyword.LEN;
import static redis.clients.jedis.Protocol.Keyword.LIMIT;
import static redis.clients.jedis.Protocol.Keyword.NO;
import static redis.clients.jedis.Protocol.Keyword.ONE;
import static redis.clients.jedis.Protocol.Keyword.REFCOUNT;
import static redis.clients.jedis.Protocol.Keyword.RESET;
import static redis.clients.jedis.Protocol.Keyword.STORE;
import static redis.clients.jedis.Protocol.Keyword.WITHSCORES;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import redis.clients.jedis.Protocol.Command;
import redis.clients.jedis.Protocol.Keyword;
import redis.clients.util.SafeEncoder;
public class BinaryClient extends Connection {
public enum LIST_POSITION {
BEFORE, AFTER;
public final byte[] raw;
private LIST_POSITION() {
raw = SafeEncoder.encode(name());
}
}
private boolean isInMulti;
private String password;
private long db;
public boolean isInMulti() {
return isInMulti;
}
public BinaryClient(final String host) {
super(host);
}
public BinaryClient(final String host, final int port) {
super(host, port);
}
/**
 * Builds a single argument vector whose first element is {@code first}
 * followed by every element of {@code rest}, in order. Elements are
 * shared by reference, not copied.
 */
private byte[][] joinParameters(byte[] first, byte[][] rest) {
    final byte[][] joined = new byte[rest.length + 1][];
    joined[0] = first;
    System.arraycopy(rest, 0, joined, 1, rest.length);
    return joined;
}
public void setPassword(final String password) {
this.password = password;
}
@Override
// Connects lazily, then replays session state so a (re)connect lands in the
// same logical session: AUTH if a password was recorded, SELECT if a
// non-default database index was recorded. Order matters: AUTH must precede
// SELECT on a password-protected server.
public void connect() {
if (!isConnected()) {
super.connect();
if (password != null) {
// Re-authenticate and consume the server's status reply inline.
auth(password);
getStatusCodeReply();
}
if (db > 0) {
// Restore the previously selected database (db is tracked by select()).
select(Long.valueOf(db).intValue());
getStatusCodeReply();
}
}
}
public void ping() {
sendCommand(Command.PING);
}
public void set(final byte[] key, final byte[] value) {
sendCommand(Command.SET, key, value);
}
public void get(final byte[] key) {
sendCommand(Command.GET, key);
}
public void quit() {
db = 0;
sendCommand(QUIT);
}
public void exists(final byte[] key) {
sendCommand(EXISTS, key);
}
public void del(final byte[]... keys) {
sendCommand(DEL, keys);
}
public void type(final byte[] key) {
sendCommand(TYPE, key);
}
public void flushDB() {
sendCommand(FLUSHDB);
}
public void keys(final byte[] pattern) {
sendCommand(KEYS, pattern);
}
public void randomKey() {
sendCommand(RANDOMKEY);
}
public void rename(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAME, oldkey, newkey);
}
public void renamenx(final byte[] oldkey, final byte[] newkey) {
sendCommand(RENAMENX, oldkey, newkey);
}
public void dbSize() {
sendCommand(DBSIZE);
}
public void expire(final byte[] key, final int seconds) {
sendCommand(EXPIRE, key, toByteArray(seconds));
}
public void expireAt(final byte[] key, final long unixTime) {
sendCommand(EXPIREAT, key, toByteArray(unixTime));
}
public void ttl(final byte[] key) {
sendCommand(TTL, key);
}
public void select(final int index) {
db = index;
sendCommand(SELECT, toByteArray(index));
}
public void move(final byte[] key, final int dbIndex) {
sendCommand(MOVE, key, toByteArray(dbIndex));
}
public void flushAll() {
sendCommand(FLUSHALL);
}
public void getSet(final byte[] key, final byte[] value) {
sendCommand(GETSET, key, value);
}
public void mget(final byte[]... keys) {
sendCommand(MGET, keys);
}
public void setnx(final byte[] key, final byte[] value) {
sendCommand(SETNX, key, value);
}
public void setex(final byte[] key, final int seconds, final byte[] value) {
sendCommand(SETEX, key, toByteArray(seconds), value);
}
public void mset(final byte[]... keysvalues) {
sendCommand(MSET, keysvalues);
}
public void msetnx(final byte[]... keysvalues) {
sendCommand(MSETNX, keysvalues);
}
public void decrBy(final byte[] key, final long integer) {
sendCommand(DECRBY, key, toByteArray(integer));
}
public void decr(final byte[] key) {
sendCommand(DECR, key);
}
public void incrBy(final byte[] key, final long integer) {
sendCommand(INCRBY, key, toByteArray(integer));
}
public void incr(final byte[] key) {
sendCommand(INCR, key);
}
public void append(final byte[] key, final byte[] value) {
sendCommand(APPEND, key, value);
}
public void substr(final byte[] key, final int start, final int end) {
sendCommand(SUBSTR, key, toByteArray(start), toByteArray(end));
}
public void hset(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSET, key, field, value);
}
public void hget(final byte[] key, final byte[] field) {
sendCommand(HGET, key, field);
}
public void hsetnx(final byte[] key, final byte[] field, final byte[] value) {
sendCommand(HSETNX, key, field, value);
}
/**
 * HMSET: sets multiple hash fields in one command. The argument vector is
 * the key followed by alternating field/value pairs in the map's iteration
 * order.
 */
public void hmset(final byte[] key, final Map<byte[], byte[]> hash) {
    final byte[][] cmdArgs = new byte[hash.size() * 2 + 1][];
    int idx = 0;
    cmdArgs[idx++] = key;
    for (final Entry<byte[], byte[]> entry : hash.entrySet()) {
        cmdArgs[idx++] = entry.getKey();
        cmdArgs[idx++] = entry.getValue();
    }
    sendCommand(HMSET, cmdArgs);
}
public void hmget(final byte[] key, final byte[]... fields) {
final byte[][] params = new byte[fields.length + 1][];
params[0] = key;
System.arraycopy(fields, 0, params, 1, fields.length);
sendCommand(HMGET, params);
}
public void hincrBy(final byte[] key, final byte[] field, final long value) {
sendCommand(HINCRBY, key, field, toByteArray(value));
}
public void hexists(final byte[] key, final byte[] field) {
sendCommand(HEXISTS, key, field);
}
public void hdel(final byte[] key, final byte[]... fields) {
sendCommand(HDEL, joinParameters(key, fields));
}
public void hlen(final byte[] key) {
sendCommand(HLEN, key);
}
public void hkeys(final byte[] key) {
sendCommand(HKEYS, key);
}
public void hvals(final byte[] key) {
sendCommand(HVALS, key);
}
public void hgetAll(final byte[] key) {
sendCommand(HGETALL, key);
}
public void rpush(final byte[] key, final byte[]... strings) {
sendCommand(RPUSH, joinParameters(key, strings));
}
public void lpush(final byte[] key, final byte[]... strings) {
sendCommand(LPUSH, joinParameters(key, strings));
}
public void llen(final byte[] key) {
sendCommand(LLEN, key);
}
public void lrange(final byte[] key, final long start, final long end) {
sendCommand(LRANGE, key, toByteArray(start), toByteArray(end));
}
public void ltrim(final byte[] key, final long start, final long end) {
sendCommand(LTRIM, key, toByteArray(start), toByteArray(end));
}
public void lindex(final byte[] key, final long index) {
sendCommand(LINDEX, key, toByteArray(index));
}
public void lset(final byte[] key, final long index, final byte[] value) {
sendCommand(LSET, key, toByteArray(index), value);
}
public void lrem(final byte[] key, long count, final byte[] value) {
sendCommand(LREM, key, toByteArray(count), value);
}
public void lpop(final byte[] key) {
sendCommand(LPOP, key);
}
public void rpop(final byte[] key) {
sendCommand(RPOP, key);
}
public void rpoplpush(final byte[] srckey, final byte[] dstkey) {
sendCommand(RPOPLPUSH, srckey, dstkey);
}
public void sadd(final byte[] key, final byte[]... members) {
sendCommand(SADD, joinParameters(key, members));
}
public void smembers(final byte[] key) {
sendCommand(SMEMBERS, key);
}
public void srem(final byte[] key, final byte[]... members) {
sendCommand(SREM, joinParameters(key, members));
}
public void spop(final byte[] key) {
sendCommand(SPOP, key);
}
public void smove(final byte[] srckey, final byte[] dstkey,
final byte[] member) {
sendCommand(SMOVE, srckey, dstkey, member);
}
public void scard(final byte[] key) {
sendCommand(SCARD, key);
}
public void sismember(final byte[] key, final byte[] member) {
sendCommand(SISMEMBER, key, member);
}
public void sinter(final byte[]... keys) {
sendCommand(SINTER, keys);
}
public void sinterstore(final byte[] dstkey, final byte[]... keys) {
final byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SINTERSTORE, params);
}
public void sunion(final byte[]... keys) {
sendCommand(SUNION, keys);
}
public void sunionstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SUNIONSTORE, params);
}
public void sdiff(final byte[]... keys) {
sendCommand(SDIFF, keys);
}
public void sdiffstore(final byte[] dstkey, final byte[]... keys) {
byte[][] params = new byte[keys.length + 1][];
params[0] = dstkey;
System.arraycopy(keys, 0, params, 1, keys.length);
sendCommand(SDIFFSTORE, params);
}
public void srandmember(final byte[] key) {
sendCommand(SRANDMEMBER, key);
}
public void zadd(final byte[] key, final double score, final byte[] member) {
sendCommand(ZADD, key, toByteArray(score), member);
}
/**
 * ZADD with multiple score/member pairs in a single round trip.
 *
 * NOTE(review): the map is keyed by score, so two members sharing the same
 * score cannot both be sent through this overload — the Map collapses them
 * to one entry. Confirm callers never need duplicate scores.
 */
public void zaddBinary(final byte[] key, Map<Double, byte[]> scoreMembers) {
ArrayList<byte[]> args = new ArrayList<byte[]>(
scoreMembers.size() * 2 + 1);
args.add(key);
for (Map.Entry<Double, byte[]> entry : scoreMembers.entrySet()) {
// Redis protocol order per pair: score first, then member.
args.add(toByteArray(entry.getKey()));
args.add(entry.getValue());
}
byte[][] argsArray = new byte[args.size()][];
args.toArray(argsArray);
sendCommand(ZADD, argsArray);
}
public void zrange(final byte[] key, final long start, final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrem(final byte[] key, final byte[]... members) {
sendCommand(ZREM, joinParameters(key, members));
}
public void zincrby(final byte[] key, final double score,
final byte[] member) {
sendCommand(ZINCRBY, key, toByteArray(score), member);
}
public void zrank(final byte[] key, final byte[] member) {
sendCommand(ZRANK, key, member);
}
public void zrevrank(final byte[] key, final byte[] member) {
sendCommand(ZREVRANK, key, member);
}
public void zrevrange(final byte[] key, final long start, final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end));
}
public void zrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zrevrangeWithScores(final byte[] key, final long start,
final long end) {
sendCommand(ZREVRANGE, key, toByteArray(start), toByteArray(end),
WITHSCORES.raw);
}
public void zcard(final byte[] key) {
sendCommand(ZCARD, key);
}
public void zscore(final byte[] key, final byte[] member) {
sendCommand(ZSCORE, key, member);
}
public void multi() {
sendCommand(MULTI);
isInMulti = true;
}
public void discard() {
sendCommand(DISCARD);
isInMulti = false;
}
public void exec() {
sendCommand(EXEC);
isInMulti = false;
}
public void watch(final byte[]... keys) {
sendCommand(WATCH, keys);
}
public void unwatch() {
sendCommand(UNWATCH);
}
public void sort(final byte[] key) {
sendCommand(SORT, key);
}
public void sort(final byte[] key, final SortingParams sortingParameters) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void blpop(final byte[][] args) {
sendCommand(BLPOP, args);
}
/**
 * BLPOP convenience overload: appends the timeout (in seconds) after the
 * key list and delegates to {@link #blpop(byte[][])}.
 */
public void blpop(final int timeout, final byte[]... keys) {
    final byte[][] blpopArgs = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, blpopArgs, 0, keys.length);
    blpopArgs[keys.length] = Protocol.toByteArray(timeout);
    blpop(blpopArgs);
}
public void sort(final byte[] key, final SortingParams sortingParameters,
final byte[] dstkey) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(key);
args.addAll(sortingParameters.getParams());
args.add(STORE.raw);
args.add(dstkey);
sendCommand(SORT, args.toArray(new byte[args.size()][]));
}
public void sort(final byte[] key, final byte[] dstkey) {
sendCommand(SORT, key, STORE.raw, dstkey);
}
public void brpop(final byte[][] args) {
sendCommand(BRPOP, args);
}
/**
 * BRPOP convenience overload: appends the timeout (in seconds) after the
 * key list and delegates to {@link #brpop(byte[][])}.
 */
public void brpop(final int timeout, final byte[]... keys) {
    final byte[][] brpopArgs = new byte[keys.length + 1][];
    System.arraycopy(keys, 0, brpopArgs, 0, keys.length);
    brpopArgs[keys.length] = Protocol.toByteArray(timeout);
    brpop(brpopArgs);
}
public void auth(final String password) {
setPassword(password);
sendCommand(AUTH, password);
}
public void subscribe(final byte[]... channels) {
sendCommand(SUBSCRIBE, channels);
}
public void publish(final byte[] channel, final byte[] message) {
sendCommand(PUBLISH, channel, message);
}
public void unsubscribe() {
sendCommand(UNSUBSCRIBE);
}
public void unsubscribe(final byte[]... channels) {
sendCommand(UNSUBSCRIBE, channels);
}
public void psubscribe(final byte[]... patterns) {
sendCommand(PSUBSCRIBE, patterns);
}
public void punsubscribe() {
sendCommand(PUNSUBSCRIBE);
}
public void punsubscribe(final byte[]... patterns) {
sendCommand(PUNSUBSCRIBE, patterns);
}
<<<<<<< MINE
public void zcount(final byte[] key, final byte[] min, final byte[] max) {
sendCommand(ZCOUNT, key, min, max);
=======
public void zcount(final byte[] key, final double min, final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZCOUNT, key, byteArrayMin, byteArrayMax);
}
public void zcount(final byte[] key, final byte min[], final byte max[]) {
sendCommand(ZCOUNT, key, min, max);
}
public void zcount(final byte[] key, final String min, final String max) {
sendCommand(ZCOUNT, key, min.getBytes(), max.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax);
>>>>>>> YOURS
}
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max);
}
public void zrangeByScore(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes());
}
public void zrevrangeByScore(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin);
}
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min);
}
<<<<<<< MINE
public void zrangeByScore(final byte[] key, final byte[] min,
final byte[] max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
public void zrevrangeByScore(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes());
}
public void zrangeByScore(final byte[] key, final double min,
final double max, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final byte min[],
final byte max[], final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrangeByScore(final byte[] key, final String min,
final String max, final int offset, int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrevrangeByScore(final byte[] key, final byte[] max,
final byte[] min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count));
=======
public void zrevrangeByScore(final byte[] key, final double max,
final double min, final int offset, int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final byte max[],
final byte min[], final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count));
}
public void zrevrangeByScore(final byte[] key, final String max,
final String min, final int offset, int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count));
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max) {
sendCommand(ZRANGEBYSCORE, key, min, max, WITHSCORES.raw);
=======
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[]) {
sendCommand(ZRANGEBYSCORE, key, min, max,
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
WITHSCORES.raw);
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, WITHSCORES.raw);
=======
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[]) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
WITHSCORES.raw);
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrangeByScoreWithScores(final byte[] key, final byte[] min,
final byte[] max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
public void zrangeByScoreWithScores(final byte[] key, final double min,
final double max, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZRANGEBYSCORE, key, byteArrayMin, byteArrayMax,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final byte min[],
final byte max[], final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min, max,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrangeByScoreWithScores(final byte[] key, final String min,
final String max, final int offset, final int count) {
sendCommand(ZRANGEBYSCORE, key, min.getBytes(), max.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
<<<<<<< MINE
public void zrevrangeByScoreWithScores(final byte[] key, final byte[] max,
final byte[] min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min, LIMIT.raw,
toByteArray(offset), toByteArray(count), WITHSCORES.raw);
=======
public void zrevrangeByScoreWithScores(final byte[] key, final double max,
final double min, final int offset, final int count) {
byte byteArrayMin[] = (min == Double.NEGATIVE_INFINITY) ? "-inf".getBytes() : toByteArray(min);
byte byteArrayMax[] = (max == Double.POSITIVE_INFINITY) ? "+inf".getBytes() : toByteArray(max);
sendCommand(ZREVRANGEBYSCORE, key, byteArrayMax, byteArrayMin,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final byte max[],
final byte min[], final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max, min,
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
}
public void zrevrangeByScoreWithScores(final byte[] key, final String max,
final String min, final int offset, final int count) {
sendCommand(ZREVRANGEBYSCORE, key, max.getBytes(), min.getBytes(),
LIMIT.raw, toByteArray(offset), toByteArray(count),
WITHSCORES.raw);
>>>>>>> YOURS
}
public void zremrangeByRank(final byte[] key, final long start,
final long end) {
sendCommand(ZREMRANGEBYRANK, key, toByteArray(start), toByteArray(end));
}
/**
 * ZREMRANGEBYSCORE with raw byte[] range specifiers.
 *
 * Fix: the previous source declared this overload twice — once as
 * {@code byte[] start} and once as {@code byte start[]}. Those declarator
 * forms denote the same type in Java, so the class did not compile; the
 * duplicate has been removed.
 */
public void zremrangeByScore(final byte[] key, final byte[] start,
        final byte[] end) {
    sendCommand(ZREMRANGEBYSCORE, key, start, end);
}
public void zremrangeByScore(final byte[] key, final String start,
final String end) {
sendCommand(ZREMRANGEBYSCORE, key, start.getBytes(), end.getBytes());
}
public void zunionstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZUNIONSTORE, params);
}
public void zunionstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZUNIONSTORE, args.toArray(new byte[args.size()][]));
}
public void zinterstore(final byte[] dstkey, final byte[]... sets) {
final byte[][] params = new byte[sets.length + 2][];
params[0] = dstkey;
params[1] = Protocol.toByteArray(sets.length);
System.arraycopy(sets, 0, params, 2, sets.length);
sendCommand(ZINTERSTORE, params);
}
public void zinterstore(final byte[] dstkey, final ZParams params,
final byte[]... sets) {
final List<byte[]> args = new ArrayList<byte[]>();
args.add(dstkey);
args.add(Protocol.toByteArray(sets.length));
for (final byte[] set : sets) {
args.add(set);
}
args.addAll(params.getParams());
sendCommand(ZINTERSTORE, args.toArray(new byte[args.size()][]));
}
public void save() {
sendCommand(SAVE);
}
public void bgsave() {
sendCommand(BGSAVE);
}
public void bgrewriteaof() {
sendCommand(BGREWRITEAOF);
}
public void lastsave() {
sendCommand(LASTSAVE);
}
public void shutdown() {
sendCommand(SHUTDOWN);
}
public void info() {
sendCommand(INFO);
}
public void info(final String section) {
sendCommand(INFO, section);
}
public void monitor() {
sendCommand(MONITOR);
}
public void slaveof(final String host, final int port) {
sendCommand(SLAVEOF, host, String.valueOf(port));
}
public void slaveofNoOne() {
sendCommand(SLAVEOF, NO.raw, ONE.raw);
}
public void configGet(final byte[] pattern) {
sendCommand(CONFIG, Keyword.GET.raw, pattern);
}
public void configSet(final byte[] parameter, final byte[] value) {
sendCommand(CONFIG, Keyword.SET.raw, parameter, value);
}
public void strlen(final byte[] key) {
sendCommand(STRLEN, key);
}
public void sync() {
sendCommand(SYNC);
}
public void lpushx(final byte[] key, final byte[]... string) {
sendCommand(LPUSHX, joinParameters(key, string));
}
public void persist(final byte[] key) {
sendCommand(PERSIST, key);
}
public void rpushx(final byte[] key, final byte[]... string) {
sendCommand(RPUSHX, joinParameters(key, string));
}
public void echo(final byte[] string) {
sendCommand(ECHO, string);
}
public void linsert(final byte[] key, final LIST_POSITION where,
final byte[] pivot, final byte[] value) {
sendCommand(LINSERT, key, where.raw, pivot, value);
}
public void debug(final DebugParams params) {
sendCommand(DEBUG, params.getCommand());
}
public void brpoplpush(final byte[] source, final byte[] destination,
final int timeout) {
sendCommand(BRPOPLPUSH, source, destination, toByteArray(timeout));
}
public void configResetStat() {
sendCommand(CONFIG, Keyword.RESETSTAT.name());
}
public void setbit(byte[] key, long offset, byte[] value) {
sendCommand(SETBIT, key, toByteArray(offset), value);
}
public void setbit(byte[] key, long offset, boolean value) {
sendCommand(SETBIT, key, toByteArray(offset), toByteArray(value));
}
public void getbit(byte[] key, long offset) {
sendCommand(GETBIT, key, toByteArray(offset));
}
public void setrange(byte[] key, long offset, byte[] value) {
sendCommand(SETRANGE, key, toByteArray(offset), value);
}
public void getrange(byte[] key, long startOffset, long endOffset) {
sendCommand(GETRANGE, key, toByteArray(startOffset),
toByteArray(endOffset));
}
public Long getDB() {
return db;
}
public void disconnect() {
db = 0;
super.disconnect();
}
/**
 * Shared dispatcher for EVAL/EVALSHA: packs [script-or-sha1, numkeys,
 * params...] into one argument vector and sends it.
 *
 * @param command  EVAL or EVALSHA
 * @param script   the Lua source for EVAL, or the SHA-1 digest for EVALSHA
 *                 (the parameter name is historical; both callers route here)
 * @param keyCount numkeys, already encoded as bytes
 * @param params   key names followed by argv values
 */
private void sendEvalCommand(Command command, byte[] script,
byte[] keyCount, byte[][] params) {
final byte[][] allArgs = new byte[params.length + 2][];
allArgs[0] = script;
allArgs[1] = keyCount;
for (int i = 0; i < params.length; i++)
allArgs[i + 2] = params[i];
sendCommand(command, allArgs);
}
public void eval(byte[] script, byte[] keyCount, byte[][] params) {
sendEvalCommand(EVAL, script, keyCount, params);
}
public void eval(byte[] script, int keyCount, byte[]... params) {
eval(script, toByteArray(keyCount), params);
}
public void evalsha(byte[] sha1, byte[] keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, keyCount, params);
}
public void evalsha(byte[] sha1, int keyCount, byte[]... params) {
sendEvalCommand(EVALSHA, sha1, toByteArray(keyCount), params);
}
public void scriptFlush() {
sendCommand(SCRIPT, Keyword.FLUSH.raw);
}
public void scriptExists(byte[]... sha1) {
byte[][] args = new byte[sha1.length + 1][];
args[0] = Keyword.EXISTS.raw;
for (int i = 0; i < sha1.length; i++)
args[i + 1] = sha1[i];
sendCommand(SCRIPT, args);
}
public void scriptLoad(byte[] script) {
sendCommand(SCRIPT, Keyword.LOAD.raw, script);
}
public void scriptKill() {
sendCommand(SCRIPT, Keyword.KILL.raw);
}
public void slowlogGet() {
sendCommand(SLOWLOG, Keyword.GET.raw);
}
public void slowlogGet(long entries) {
sendCommand(SLOWLOG, Keyword.GET.raw, toByteArray(entries));
}
public void slowlogReset() {
sendCommand(SLOWLOG, RESET.raw);
}
public void slowlogLen() {
sendCommand(SLOWLOG, LEN.raw);
}
public void objectRefcount(byte[] key) {
sendCommand(OBJECT, REFCOUNT.raw, key);
}
public void objectIdletime(byte[] key) {
sendCommand(OBJECT, IDLETIME.raw, key);
}
public void objectEncoding(byte[] key) {
sendCommand(OBJECT, ENCODING.raw, key);
}
public void bitcount(byte[] key) {
sendCommand(BITCOUNT, key);
}
public void bitcount(byte[] key, long start, long end) {
sendCommand(BITCOUNT, key, toByteArray(start), toByteArray(end));
}
// Sends BITOP: maps the BitOP enum onto the protocol Keyword and lays the
// arguments out as [operation, destKey, srcKeys...].
public void bitop(BitOP op, byte[] destKey, byte[]... srcKeys) {
Keyword kw = Keyword.AND;
int len = srcKeys.length;
switch (op) {
case AND:
kw = Keyword.AND;
break;
case OR:
kw = Keyword.OR;
break;
case XOR:
kw = Keyword.XOR;
break;
case NOT:
// NOT is unary: at most one source key is forwarded.
len = Math.min(1, len);
break;
}
byte[][] bargs = new byte[len + 2][];
bargs[0] = kw.raw;
bargs[1] = destKey;
for (int i = 0; i < len; ++i) {
bargs[i + 2] = srcKeys[i];
}
sendCommand(BITOP, bargs);
}
public void sentinel(final byte[]... args) {
sendCommand(SENTINEL, args);
}
public void dump(final byte[] key) {
sendCommand(DUMP, key);
}
public void restore(final byte[] key, final int ttl, final byte[] serializedValue) {
sendCommand(RESTORE, key, toByteArray(ttl), serializedValue);
}
public void pexpire(final byte[] key, final int milliseconds) {
sendCommand(PEXPIRE, key, toByteArray(milliseconds));
}
public void pexpireAt(final byte[] key, final long millisecondsTimestamp) {
sendCommand(PEXPIREAT, key, toByteArray(millisecondsTimestamp));
}
public void pttl(final byte[] key) {
sendCommand(PTTL, key);
}
public void incrByFloat(final byte[] key, final double increment) {
sendCommand(INCRBYFLOAT, key, toByteArray(increment));
}
public void psetex(final byte[] key, final int milliseconds, final byte[] value) {
sendCommand(PSETEX, key, toByteArray(milliseconds), value);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx) {
sendCommand(Command.SET, key, value, nxxx);
}
public void set(final byte[] key, final byte[] value, final byte[] nxxx, final byte[] expx, final int time) {
sendCommand(Command.SET, key, value, nxxx, expx, toByteArray(time));
}
public void srandmember(final byte[] key, final int count) {
sendCommand(SRANDMEMBER, key, toByteArray(count));
}
public void clientKill(final byte[] client) {
sendCommand(CLIENT, Keyword.KILL.raw, client);
}
public void clientGetname() {
sendCommand(CLIENT, Keyword.GETNAME.raw);
}
public void clientList() {
sendCommand(CLIENT, Keyword.LIST.raw);
}
public void clientSetname(final byte[] name) {
sendCommand(CLIENT, Keyword.SETNAME.raw, name);
}
public void time() {
sendCommand(TIME);
}
public void migrate(final byte[] host, final int port, final byte[] key, final int destinationDb, final int timeout) {
sendCommand(MIGRATE, host, toByteArray(port), key, toByteArray(destinationDb), toByteArray(timeout));
}
public void hincrByFloat(final byte[] key, final byte[] field, double increment) {
sendCommand(HINCRBYFLOAT, key, field, toByteArray(increment));
}
}
Modified argument types to (final Command cmd, final String... args)
Modified body
Added overloaded versions
Right modified body
Unstructured reported conflict
Safe reported conflict
MergeMethods reported conflict only on body of most similar pair of methods
KeepBothMethods kept all versions
Base
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(String host) {
super();
this.host = host;
}
protected Connection sendCommand(String name, String... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, name, args);
pipelinedCommands++;
return this;
}
public Connection(String host, int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public String getBulkReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<String> getMultiBulkReply() {
pipelinedCommands--;
return (List<String>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(String host) {
super();
this.host = host;
}
protected Connection sendCommand(String name, String... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, name, args);
pipelinedCommands++;
return this;
}
public Connection(String host, int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public String getBulkReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<String> getMultiBulkReply() {
pipelinedCommands--;
return (List<String>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
Left
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
// Convenience overload: encodes each String argument to UTF-8 bytes and
// delegates to the byte[][] variant.
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
// Core send path: lazily connects, writes the command, and increments the
// count of replies still pending on the pipeline. Returns this for chaining.
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
Right
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(String host) {
super();
this.host = host;
}
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
pipelinedCommands++;
return this;
}
public Connection(String host, int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public String getBulkReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<String> getMultiBulkReply() {
pipelinedCommands--;
return (List<String>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(String host) {
super();
this.host = host;
}
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
pipelinedCommands++;
return this;
}
public Connection(String host, int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public String getBulkReply() {
pipelinedCommands--;
return (String) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<String> getMultiBulkReply() {
pipelinedCommands--;
return (List<String>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
MergeMethods
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
// NOTE(review): unresolved merge conflict preserved verbatim from the tool's
// output. The YOURS side references `name`, which is not in scope after the
// signature was changed to take a Command — only the MINE side is compatible
// with the merged signature.
<<<<<<< MINE
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
=======
protocol.sendCommand(outputStream, name, args);
>>>>>>> YOURS
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
<<<<<<< MINE
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
=======
protocol.sendCommand(outputStream, name, args);
>>>>>>> YOURS
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
KeepBothMethods
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for (int i = 0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed() && socket.isConnected() && !socket.isInputShutdown() && !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if (null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for (final byte[] barray : bresult) {
if (barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for (int i = 0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed() && socket.isConnected() && !socket.isInputShutdown() && !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if (null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for (final byte[] barray : bresult) {
if (barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
Safe
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
<<<<<<< MINE
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
=======
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
>>>>>>> YOURS
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
<<<<<<< MINE
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
=======
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
>>>>>>> YOURS
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
Unstructured
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
<<<<<<< MINE
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
=======
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
>>>>>>> YOURS
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
package redis.clients.jedis;
import java.io.IOException;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import redis.clients.jedis.Protocol.Command;
import redis.clients.util.RedisInputStream;
import redis.clients.util.RedisOutputStream;
public class Connection {
private String host;
private int port = Protocol.DEFAULT_PORT;
private Socket socket;
private Protocol protocol = new Protocol();
private RedisOutputStream outputStream;
private RedisInputStream inputStream;
private int pipelinedCommands = 0;
private int timeout = 2000;
public int getTimeout() {
return timeout;
}
public void setTimeout(final int timeout) {
this.timeout = timeout;
}
public void setTimeoutInfinite() {
try {
socket.setSoTimeout(0);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public void rollbackTimeout() {
try {
socket.setSoTimeout(timeout);
} catch (SocketException ex) {
throw new JedisException(ex);
}
}
public Connection(final String host) {
super();
this.host = host;
}
<<<<<<< MINE
protected Connection sendCommand(final Command cmd, final String... args) {
final byte[][] bargs = new byte[args.length][];
for(int i=0; i < args.length; i++) {
bargs[i] = args[i].getBytes(Protocol.UTF8);
}
return sendCommand(cmd, bargs);
}
protected Connection sendCommand(final Command cmd, final byte[]... args) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, args);
=======
protected Connection sendCommand(String name, String... args) {
protocol.sendCommand(outputStream, name, args);
>>>>>>> YOURS
pipelinedCommands++;
return this;
}
protected Connection sendCommand(final Command cmd) {
try {
connect();
} catch (UnknownHostException e) {
throw new JedisException("Could not connect to redis-server", e);
} catch (IOException e) {
throw new JedisException("Could not connect to redis-server", e);
}
protocol.sendCommand(outputStream, cmd, new byte[0][]);
pipelinedCommands++;
return this;
}
public Connection(final String host, final int port) {
super();
this.host = host;
this.port = port;
}
public String getHost() {
return host;
}
public void setHost(final String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(final int port) {
this.port = port;
}
public Connection() {
}
public void connect() throws UnknownHostException, IOException {
if (!isConnected()) {
socket = new Socket(host, port);
socket.setSoTimeout(timeout);
outputStream = new RedisOutputStream(socket.getOutputStream());
inputStream = new RedisInputStream(socket.getInputStream());
}
}
public void disconnect() {
if (isConnected()) {
try {
inputStream.close();
outputStream.close();
if (!socket.isClosed()) {
socket.close();
}
} catch (IOException ex) {
throw new JedisException(ex);
}
}
}
public boolean isConnected() {
return socket != null && socket.isBound() && !socket.isClosed()
&& socket.isConnected() && !socket.isInputShutdown()
&& !socket.isOutputShutdown();
}
protected String getStatusCodeReply() {
pipelinedCommands--;
final byte[] resp = (byte[]) protocol.read(inputStream);
if (null == resp) {
return null;
} else {
return new String(resp, Protocol.UTF8);
}
}
public String getBulkReply() {
final byte[] result = getBinaryBulkReply();
if (null != result) {
return new String(result, Protocol.UTF8);
} else {
return null;
}
}
public byte[] getBinaryBulkReply() {
pipelinedCommands--;
return (byte[]) protocol.read(inputStream);
}
public Integer getIntegerReply() {
pipelinedCommands--;
return (Integer) protocol.read(inputStream);
}
public List<String> getMultiBulkReply() {
final List<byte[]> bresult = getBinaryMultiBulkReply();
if(null == bresult) {
return null;
}
final ArrayList<String> result = new ArrayList<String>(bresult.size());
for(final byte[] barray : bresult) {
if( barray == null) {
result.add(null);
} else {
result.add(new String(barray, Protocol.UTF8));
}
}
return result;
}
@SuppressWarnings("unchecked")
public List<byte[]> getBinaryMultiBulkReply() {
pipelinedCommands--;
return (List<byte[]>) protocol.read(inputStream);
}
@SuppressWarnings("unchecked")
public List<Object> getObjectMultiBulkReply() {
pipelinedCommands--;
return (List<Object>) protocol.read(inputStream);
}
public List<Object> getAll() {
List<Object> all = new ArrayList<Object>();
while (pipelinedCommands > 0) {
all.add(protocol.read(inputStream));
pipelinedCommands--;
}
return all;
}
public Object getOne() {
pipelinedCommands--;
return protocol.read(inputStream);
}
}
Diff Result
No diff
Case 10 - junit.rev_c3873_b33b4.MaxCore.java
void findLeaves
Left modified signature and body
Right modified signature and body
Unstructured reported multiple conflicts
Safe kept both methods
MergeMethods reported conflict including whole method
KeepBothMethods kept both versions
Base
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
// NOTE(review): verbatim "Base" exhibit of the merge scenario (Case 10, junit
// MaxCore); the code is intentionally left unmodified so the exhibit stays exact.
public class MaxCore {
// Prefix used to re-tag JUnit 3 "warning" placeholder descriptions so
// buildRunner can recover the originating class (see findLeaves below).
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
// Persistent run/failure history backing the sort order.
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
// Record results so future runs can reorder tests by failure history.
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
// Builds a Request whose runner executes the given leaves in list order.
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a leaf Description back to a Runner that executes just that test.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the class named after the malformed-class prefix; null if it cannot be loaded.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects the method-level (leaf) descriptions of the request's runner tree.
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getDescription(), results);
return results;
}
// Depth-first walk over the Description tree; JUnit 3 "warning" placeholders
// are re-tagged with the malformed-class prefix plus their parent's name.
private void findLeaves(Description parent, Description description, List<Description> results) {
if (description.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
else
results.add(description);
else
for (Description each : description.getChildren())
findLeaves(description, each, results);
}
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
// NOTE(review): verbatim "Base" exhibit of the merge scenario (Case 10, junit
// MaxCore); the code is intentionally left unmodified so the exhibit stays exact.
public class MaxCore {
// Prefix used to re-tag JUnit 3 "warning" placeholder descriptions so
// buildRunner can recover the originating class (see findLeaves below).
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
// Persistent run/failure history backing the sort order.
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
// Record results so future runs can reorder tests by failure history.
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
// Builds a Request whose runner executes the given leaves in list order.
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a leaf Description back to a Runner that executes just that test.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the class named after the malformed-class prefix; null if it cannot be loaded.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects the method-level (leaf) descriptions of the request's runner tree.
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getDescription(), results);
return results;
}
// Depth-first walk over the Description tree; JUnit 3 "warning" placeholders
// are re-tagged with the malformed-class prefix plus their parent's name.
private void findLeaves(Description parent, Description description, List<Description> results) {
if (description.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
else
results.add(description);
else
for (Description each : description.getChildren())
findLeaves(description, each, results);
}
}
Left
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
// NOTE(review): verbatim "Left" exhibit of the merge scenario (Case 10, junit
// MaxCore): findLeaves was changed to walk the runner's Plan tree instead of the
// Description tree. Code intentionally left unmodified so the exhibit stays exact.
public class MaxCore {
// Prefix used to re-tag JUnit 3 "warning" placeholder descriptions so
// buildRunner can recover the originating class (see findLeaves below).
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
// Persistent run/failure history backing the sort order.
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
// Record results so future runs can reorder tests by failure history.
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
// Builds a Request whose runner executes the given leaves in list order.
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a leaf Description back to a Runner that executes just that test.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the class named after the malformed-class prefix; null if it cannot be loaded.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects the method-level (leaf) descriptions, starting from the runner's Plan.
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
// Depth-first walk over the Plan tree; JUnit 3 "warning" placeholders are
// re-tagged with the malformed-class prefix plus their parent's description.
private void findLeaves(Description parent, Plan plan, List<Description> results) {
Description description = plan.getDescription();
if (plan.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
else
results.add(description);
else
for (Plan each : plan.getChildren())
findLeaves(description, each, results);
}
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
// NOTE(review): verbatim "Left" exhibit of the merge scenario (Case 10, junit
// MaxCore): findLeaves was changed to walk the runner's Plan tree instead of the
// Description tree. Code intentionally left unmodified so the exhibit stays exact.
public class MaxCore {
// Prefix used to re-tag JUnit 3 "warning" placeholder descriptions so
// buildRunner can recover the originating class (see findLeaves below).
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
// Persistent run/failure history backing the sort order.
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
// Record results so future runs can reorder tests by failure history.
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
// Builds a Request whose runner executes the given leaves in list order.
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a leaf Description back to a Runner that executes just that test.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the class named after the malformed-class prefix; null if it cannot be loaded.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects the method-level (leaf) descriptions, starting from the runner's Plan.
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
// Depth-first walk over the Plan tree; JUnit 3 "warning" placeholders are
// re-tagged with the malformed-class prefix plus their parent's description.
private void findLeaves(Description parent, Plan plan, List<Description> results) {
Description description = plan.getDescription();
if (plan.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
else
results.add(description);
else
for (Plan each : plan.getChildren())
findLeaves(description, each, results);
}
}
Right
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
// NOTE(review): verbatim "Right" exhibit of the merge scenario (Case 10, junit
// MaxCore): findLeaves was changed to walk the runner's Plan tree with a Plan
// parent. Code intentionally left unmodified so the exhibit stays exact.
public class MaxCore {
// Prefix used to re-tag JUnit 3 "warning" placeholder descriptions so
// buildRunner can recover the originating class (see findLeaves below).
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
// Persistent run/failure history backing the sort order.
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
// Record results so future runs can reorder tests by failure history.
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
// Builds a Request whose runner executes the given leaves in list order.
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a leaf Description back to a Runner that executes just that test.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the class named after the malformed-class prefix; null if it cannot be loaded.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects the method-level (leaf) descriptions, starting from the runner's Plan.
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
// Depth-first walk over the Plan tree; JUnit 3 "warning" placeholders are
// re-tagged with the malformed-class prefix plus the parent plan's description.
// NOTE(review): parent is null for the root call above; the warning branch would
// then NPE on parent.getDescription() — presumably unreachable at the root, confirm.
private void findLeaves(Plan parent, Plan plan, List<Description> results) {
if (plan.getChildren().isEmpty())
if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
else
results.add(plan.getDescription());
else
for (Plan each : plan.getChildren())
findLeaves(plan, each, results);
}
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
// NOTE(review): verbatim "Right" exhibit of the merge scenario (Case 10, junit
// MaxCore): findLeaves was changed to walk the runner's Plan tree with a Plan
// parent. Code intentionally left unmodified so the exhibit stays exact.
public class MaxCore {
// Prefix used to re-tag JUnit 3 "warning" placeholder descriptions so
// buildRunner can recover the originating class (see findLeaves below).
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
// Persistent run/failure history backing the sort order.
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
// Record results so future runs can reorder tests by failure history.
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
// Builds a Request whose runner executes the given leaves in list order.
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a leaf Description back to a Runner that executes just that test.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the class named after the malformed-class prefix; null if it cannot be loaded.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects the method-level (leaf) descriptions, starting from the runner's Plan.
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
// Depth-first walk over the Plan tree; JUnit 3 "warning" placeholders are
// re-tagged with the malformed-class prefix plus the parent plan's description.
// NOTE(review): parent is null for the root call above; the warning branch would
// then NPE on parent.getDescription() — presumably unreachable at the root, confirm.
private void findLeaves(Plan parent, Plan plan, List<Description> results) {
if (plan.getChildren().isEmpty())
if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
else
results.add(plan.getDescription());
else
for (Plan each : plan.getChildren())
findLeaves(plan, each, results);
}
}
MergeMethods
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
 * A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
 * to maximize the chances that a failing test occurs early in the test run.
 *
 * The rules for sorting are:
 * <ol>
 * <li> Never-run tests first, in arbitrary order
 * <li> Group remaining tests by the date at which they most recently failed.
 * <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
 * <li> Within a group, run the fastest tests first.
 * </ol>
 */
public class MaxCore {
    // Marker prefix for synthetic descriptions of JUnit 3 classes that could not be built.
    private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     * @deprecated use storedLocally()
     */
    @Deprecated
    public static MaxCore forFolder(String folderName) {
        return storedLocally(new File(folderName));
    }
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     */
    public static MaxCore storedLocally(File storedResults) {
        return new MaxCore(storedResults);
    }
    private final MaxHistory fHistory;
    private MaxCore(File storedResults) {
        fHistory = MaxHistory.forFolder(storedResults);
    }
    /**
     * Run all the tests in <code>class</code>.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Class<?> testClass) {
        return run(Request.aClass(testClass));
    }
    /**
     * Run all the tests contained in <code>request</code>.
     * @param request the request describing tests
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request) {
        return run(request, new JUnitCore());
    }
    /**
     * Run all the tests contained in <code>request</code>.
     *
     * This variant should be used if {@code core} has attached listeners that this
     * run should notify.
     *
     * @param request the request describing tests
     * @param core a JUnitCore to delegate to.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request, JUnitCore core) {
        core.addListener(fHistory.listener());
        return core.run(sortRequest(request).getRunner());
    }
    /**
     * @param request
     * @return a new Request, which contains all of the same tests, but in a new order.
     */
    public Request sortRequest(Request request) {
        if (request instanceof SortingRequest) // We'll pay big karma points for this
            return request;
        List<Description> leaves= findLeaves(request);
        Collections.sort(leaves, fHistory.testComparator());
        return constructLeafRequest(leaves);
    }
    // Wraps each leaf description in its own runner and bundles them into one suite request.
    private Request constructLeafRequest(List<Description> leaves) {
        final List<Runner> runners = new ArrayList<Runner>(leaves.size());
        for (Description each : leaves)
            runners.add(buildRunner(each));
        return new Request() {
            @Override
            public Runner getRunner() {
                try {
                    return new Suite((Class<?>)null, runners) {};
                } catch (InitializationError e) {
                    return new ErrorReportingRunner(null, e);
                }
            }
        };
    }
    // Builds a runner for a single leaf description; malformed JUnit 3 classes are re-run whole.
    private Runner buildRunner(Description each) {
        if (each.toString().equals("TestSuite with 0 tests"))
            return Suite.emptySuite();
        if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
            // This is cheating, because it runs the whole class
            // to get the warning for this method, but we can't do better,
            // because JUnit 3.8's
            // thrown away which method the warning is for.
            return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
        Class<?> type= each.getTestClass();
        if (type == null)
            throw new RuntimeException("Can't build a runner from description [" + each + "]");
        String methodName= each.getMethodName();
        if (methodName == null)
            return Request.aClass(type).getRunner();
        return Request.method(type, methodName).getRunner();
    }
    // Recovers the Class behind a "malformed JUnit 3" marker description, or null if unloadable.
    private Class<?> getMalformedTestClass(Description each) {
        try {
            return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
        } catch (ClassNotFoundException e) {
            return null;
        }
    }
    /**
     * @param request a request to run
     * @return a list of method-level tests to run, sorted in the order
     * specified in the class comment.
     */
    public List<Description> sortedLeavesForTest(Request request) {
        return findLeaves(sortRequest(request));
    }
    private List<Description> findLeaves(Request request) {
        List<Description> results= new ArrayList<Description>();
        findLeaves(null, request.getRunner().getPlan(), results);
        return results;
    }
    // NOTE(review): resolved the unresolved merge conflict here by keeping the MINE
    // (Description-parent) refactoring; the YOURS (Plan-parent) variant was behaviorally
    // equivalent for leaf collection and is superseded by this signature.
    // Depth-first collection of leaf descriptions; parent is the enclosing plan's description.
    private void findLeaves(Description parent, Plan plan, List<Description> results) {
        Description description = plan.getDescription();
        if (plan.getChildren().isEmpty())
            if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
            else
                results.add(description);
        else
            for (Plan each : plan.getChildren())
                findLeaves(description, each, results);
    }
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
 * A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
 * to maximize the chances that a failing test occurs early in the test run.
 *
 * The rules for sorting are:
 * <ol>
 * <li> Never-run tests first, in arbitrary order
 * <li> Group remaining tests by the date at which they most recently failed.
 * <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
 * <li> Within a group, run the fastest tests first.
 * </ol>
 */
public class MaxCore {
    // Marker prefix for synthetic descriptions of JUnit 3 classes that could not be built.
    private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     * @deprecated use storedLocally()
     */
    @Deprecated
    public static MaxCore forFolder(String folderName) {
        return storedLocally(new File(folderName));
    }
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     */
    public static MaxCore storedLocally(File storedResults) {
        return new MaxCore(storedResults);
    }
    private final MaxHistory fHistory;
    private MaxCore(File storedResults) {
        fHistory = MaxHistory.forFolder(storedResults);
    }
    /**
     * Run all the tests in <code>class</code>.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Class<?> testClass) {
        return run(Request.aClass(testClass));
    }
    /**
     * Run all the tests contained in <code>request</code>.
     * @param request the request describing tests
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request) {
        return run(request, new JUnitCore());
    }
    /**
     * Run all the tests contained in <code>request</code>.
     *
     * This variant should be used if {@code core} has attached listeners that this
     * run should notify.
     *
     * @param request the request describing tests
     * @param core a JUnitCore to delegate to.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request, JUnitCore core) {
        core.addListener(fHistory.listener());
        return core.run(sortRequest(request).getRunner());
    }
    /**
     * @param request
     * @return a new Request, which contains all of the same tests, but in a new order.
     */
    public Request sortRequest(Request request) {
        if (request instanceof SortingRequest) // We'll pay big karma points for this
            return request;
        List<Description> leaves= findLeaves(request);
        Collections.sort(leaves, fHistory.testComparator());
        return constructLeafRequest(leaves);
    }
    // Wraps each leaf description in its own runner and bundles them into one suite request.
    private Request constructLeafRequest(List<Description> leaves) {
        final List<Runner> runners = new ArrayList<Runner>(leaves.size());
        for (Description each : leaves)
            runners.add(buildRunner(each));
        return new Request() {
            @Override
            public Runner getRunner() {
                try {
                    return new Suite((Class<?>)null, runners) {};
                } catch (InitializationError e) {
                    return new ErrorReportingRunner(null, e);
                }
            }
        };
    }
    // Builds a runner for a single leaf description; malformed JUnit 3 classes are re-run whole.
    private Runner buildRunner(Description each) {
        if (each.toString().equals("TestSuite with 0 tests"))
            return Suite.emptySuite();
        if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
            // This is cheating, because it runs the whole class
            // to get the warning for this method, but we can't do better,
            // because JUnit 3.8's
            // thrown away which method the warning is for.
            return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
        Class<?> type= each.getTestClass();
        if (type == null)
            throw new RuntimeException("Can't build a runner from description [" + each + "]");
        String methodName= each.getMethodName();
        if (methodName == null)
            return Request.aClass(type).getRunner();
        return Request.method(type, methodName).getRunner();
    }
    // Recovers the Class behind a "malformed JUnit 3" marker description, or null if unloadable.
    private Class<?> getMalformedTestClass(Description each) {
        try {
            return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
        } catch (ClassNotFoundException e) {
            return null;
        }
    }
    /**
     * @param request a request to run
     * @return a list of method-level tests to run, sorted in the order
     * specified in the class comment.
     */
    public List<Description> sortedLeavesForTest(Request request) {
        return findLeaves(sortRequest(request));
    }
    private List<Description> findLeaves(Request request) {
        List<Description> results= new ArrayList<Description>();
        findLeaves(null, request.getRunner().getPlan(), results);
        return results;
    }
    // NOTE(review): resolved the unresolved merge conflict here by keeping the MINE
    // (Description-parent) refactoring; the YOURS (Plan-parent) variant was behaviorally
    // equivalent for leaf collection and is superseded by this signature.
    // Depth-first collection of leaf descriptions; parent is the enclosing plan's description.
    private void findLeaves(Description parent, Plan plan, List<Description> results) {
        Description description = plan.getDescription();
        if (plan.getChildren().isEmpty())
            if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
            else
                results.add(description);
        else
            for (Plan each : plan.getChildren())
                findLeaves(description, each, results);
    }
}
KeepBothMethods
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
 * A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
 * to maximize the chances that a failing test occurs early in the test run.
 *
 * The rules for sorting are:
 * <ol>
 * <li> Never-run tests first, in arbitrary order
 * <li> Group remaining tests by the date at which they most recently failed.
 * <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
 * <li> Within a group, run the fastest tests first.
 * </ol>
 */
public class MaxCore {
    // Marker prefix for synthetic descriptions of JUnit 3 classes that could not be built.
    private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX = "malformed JUnit 3 test class: ";
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     * @deprecated use storedLocally()
     */
    @Deprecated
    public static MaxCore forFolder(String folderName) {
        return storedLocally(new File(folderName));
    }
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     */
    public static MaxCore storedLocally(File storedResults) {
        return new MaxCore(storedResults);
    }
    private final MaxHistory fHistory;
    private MaxCore(File storedResults) {
        fHistory = MaxHistory.forFolder(storedResults);
    }
    /**
     * Run all the tests in <code>class</code>.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Class<?> testClass) {
        return run(Request.aClass(testClass));
    }
    /**
     * Run all the tests contained in <code>request</code>.
     * @param request the request describing tests
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request) {
        return run(request, new JUnitCore());
    }
    /**
     * Run all the tests contained in <code>request</code>.
     *
     * This variant should be used if {@code core} has attached listeners that this
     * run should notify.
     *
     * @param request the request describing tests
     * @param core a JUnitCore to delegate to.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request, JUnitCore core) {
        core.addListener(fHistory.listener());
        return core.run(sortRequest(request).getRunner());
    }
    /**
     * @param request
     * @return a new Request, which contains all of the same tests, but in a new order.
     */
    public Request sortRequest(Request request) {
        if (// We'll pay big karma points for this
        request instanceof SortingRequest)
            return request;
        List<Description> leaves = findLeaves(request);
        Collections.sort(leaves, fHistory.testComparator());
        return constructLeafRequest(leaves);
    }
    // Wraps each leaf description in its own runner and bundles them into one suite request.
    private Request constructLeafRequest(List<Description> leaves) {
        final List<Runner> runners = new ArrayList<Runner>(leaves.size());
        for (Description each : leaves) runners.add(buildRunner(each));
        return new Request() {
            @Override
            public Runner getRunner() {
                try {
                    return new Suite((Class<?>) null, runners) {
                    };
                } catch (InitializationError e) {
                    return new ErrorReportingRunner(null, e);
                }
            }
        };
    }
    // Builds a runner for a single leaf description; malformed JUnit 3 classes are re-run whole.
    private Runner buildRunner(Description each) {
        if (each.toString().equals("TestSuite with 0 tests"))
            return Suite.emptySuite();
        if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
            // thrown away which method the warning is for.
            return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
        Class<?> type = each.getTestClass();
        if (type == null)
            throw new RuntimeException("Can't build a runner from description [" + each + "]");
        String methodName = each.getMethodName();
        if (methodName == null)
            return Request.aClass(type).getRunner();
        return Request.method(type, methodName).getRunner();
    }
    // Recovers the Class behind a "malformed JUnit 3" marker description, or null if unloadable.
    private Class<?> getMalformedTestClass(Description each) {
        try {
            return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
        } catch (ClassNotFoundException e) {
            return null;
        }
    }
    /**
     * @param request a request to run
     * @return a list of method-level tests to run, sorted in the order
     * specified in the class comment.
     */
    public List<Description> sortedLeavesForTest(Request request) {
        return findLeaves(sortRequest(request));
    }
    private List<Description> findLeaves(Request request) {
        List<Description> results = new ArrayList<Description>();
        // The cast picks the Description-parent overload: a bare null matches both
        // findLeaves overloads (Description and Plan are unrelated types), which is a
        // compile-time ambiguity (JLS 15.12.2).
        findLeaves((Description) null, request.getRunner().getPlan(), results);
        return results;
    }
    // Depth-first collection of leaf descriptions; parent is the enclosing plan's description.
    private void findLeaves(Description parent, Plan plan, List<Description> results) {
        Description description = plan.getDescription();
        if (plan.getChildren().isEmpty())
            if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
            else
                results.add(description);
        else
            for (Plan each : plan.getChildren()) findLeaves(description, each, results);
    }
    // Legacy overload retained from the other merge side; parent is the enclosing plan itself.
    private void findLeaves(Plan parent, Plan plan, List<Description> results) {
        if (plan.getChildren().isEmpty())
            if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
            else
                results.add(plan.getDescription());
        else
            for (Plan each : plan.getChildren()) findLeaves(plan, each, results);
    }
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
 * A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
 * to maximize the chances that a failing test occurs early in the test run.
 *
 * The rules for sorting are:
 * <ol>
 * <li> Never-run tests first, in arbitrary order
 * <li> Group remaining tests by the date at which they most recently failed.
 * <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
 * <li> Within a group, run the fastest tests first.
 * </ol>
 */
public class MaxCore {
    // Marker prefix for synthetic descriptions of JUnit 3 classes that could not be built.
    private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX = "malformed JUnit 3 test class: ";
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     * @deprecated use storedLocally()
     */
    @Deprecated
    public static MaxCore forFolder(String folderName) {
        return storedLocally(new File(folderName));
    }
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     */
    public static MaxCore storedLocally(File storedResults) {
        return new MaxCore(storedResults);
    }
    private final MaxHistory fHistory;
    private MaxCore(File storedResults) {
        fHistory = MaxHistory.forFolder(storedResults);
    }
    /**
     * Run all the tests in <code>class</code>.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Class<?> testClass) {
        return run(Request.aClass(testClass));
    }
    /**
     * Run all the tests contained in <code>request</code>.
     * @param request the request describing tests
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request) {
        return run(request, new JUnitCore());
    }
    /**
     * Run all the tests contained in <code>request</code>.
     *
     * This variant should be used if {@code core} has attached listeners that this
     * run should notify.
     *
     * @param request the request describing tests
     * @param core a JUnitCore to delegate to.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request, JUnitCore core) {
        core.addListener(fHistory.listener());
        return core.run(sortRequest(request).getRunner());
    }
    /**
     * @param request
     * @return a new Request, which contains all of the same tests, but in a new order.
     */
    public Request sortRequest(Request request) {
        if (// We'll pay big karma points for this
        request instanceof SortingRequest)
            return request;
        List<Description> leaves = findLeaves(request);
        Collections.sort(leaves, fHistory.testComparator());
        return constructLeafRequest(leaves);
    }
    // Wraps each leaf description in its own runner and bundles them into one suite request.
    private Request constructLeafRequest(List<Description> leaves) {
        final List<Runner> runners = new ArrayList<Runner>(leaves.size());
        for (Description each : leaves) runners.add(buildRunner(each));
        return new Request() {
            @Override
            public Runner getRunner() {
                try {
                    return new Suite((Class<?>) null, runners) {
                    };
                } catch (InitializationError e) {
                    return new ErrorReportingRunner(null, e);
                }
            }
        };
    }
    // Builds a runner for a single leaf description; malformed JUnit 3 classes are re-run whole.
    private Runner buildRunner(Description each) {
        if (each.toString().equals("TestSuite with 0 tests"))
            return Suite.emptySuite();
        if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
            // thrown away which method the warning is for.
            return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
        Class<?> type = each.getTestClass();
        if (type == null)
            throw new RuntimeException("Can't build a runner from description [" + each + "]");
        String methodName = each.getMethodName();
        if (methodName == null)
            return Request.aClass(type).getRunner();
        return Request.method(type, methodName).getRunner();
    }
    // Recovers the Class behind a "malformed JUnit 3" marker description, or null if unloadable.
    private Class<?> getMalformedTestClass(Description each) {
        try {
            return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
        } catch (ClassNotFoundException e) {
            return null;
        }
    }
    /**
     * @param request a request to run
     * @return a list of method-level tests to run, sorted in the order
     * specified in the class comment.
     */
    public List<Description> sortedLeavesForTest(Request request) {
        return findLeaves(sortRequest(request));
    }
    private List<Description> findLeaves(Request request) {
        List<Description> results = new ArrayList<Description>();
        // The cast picks the Description-parent overload: a bare null matches both
        // findLeaves overloads (Description and Plan are unrelated types), which is a
        // compile-time ambiguity (JLS 15.12.2).
        findLeaves((Description) null, request.getRunner().getPlan(), results);
        return results;
    }
    // Depth-first collection of leaf descriptions; parent is the enclosing plan's description.
    private void findLeaves(Description parent, Plan plan, List<Description> results) {
        Description description = plan.getDescription();
        if (plan.getChildren().isEmpty())
            if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
            else
                results.add(description);
        else
            for (Plan each : plan.getChildren()) findLeaves(description, each, results);
    }
    // Legacy overload retained from the other merge side; parent is the enclosing plan itself.
    private void findLeaves(Plan parent, Plan plan, List<Description> results) {
        if (plan.getChildren().isEmpty())
            if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
            else
                results.add(plan.getDescription());
        else
            for (Plan each : plan.getChildren()) findLeaves(plan, each, results);
    }
}
Safe
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
 * A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
 * to maximize the chances that a failing test occurs early in the test run.
 *
 * The rules for sorting are:
 * <ol>
 * <li> Never-run tests first, in arbitrary order
 * <li> Group remaining tests by the date at which they most recently failed.
 * <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
 * <li> Within a group, run the fastest tests first.
 * </ol>
 */
public class MaxCore {
    // Marker prefix for synthetic descriptions of JUnit 3 classes that could not be built.
    private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX = "malformed JUnit 3 test class: ";
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     * @deprecated use storedLocally()
     */
    @Deprecated
    public static MaxCore forFolder(String folderName) {
        return storedLocally(new File(folderName));
    }
    /**
     * Create a new MaxCore from a serialized file stored at storedResults
     */
    public static MaxCore storedLocally(File storedResults) {
        return new MaxCore(storedResults);
    }
    private final MaxHistory fHistory;
    private MaxCore(File storedResults) {
        fHistory = MaxHistory.forFolder(storedResults);
    }
    /**
     * Run all the tests in <code>class</code>.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Class<?> testClass) {
        return run(Request.aClass(testClass));
    }
    /**
     * Run all the tests contained in <code>request</code>.
     * @param request the request describing tests
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request) {
        return run(request, new JUnitCore());
    }
    /**
     * Run all the tests contained in <code>request</code>.
     *
     * This variant should be used if {@code core} has attached listeners that this
     * run should notify.
     *
     * @param request the request describing tests
     * @param core a JUnitCore to delegate to.
     * @return a {@link Result} describing the details of the test run and the failed tests.
     */
    public Result run(Request request, JUnitCore core) {
        core.addListener(fHistory.listener());
        return core.run(sortRequest(request).getRunner());
    }
    /**
     * @param request
     * @return a new Request, which contains all of the same tests, but in a new order.
     */
    public Request sortRequest(Request request) {
        if (// We'll pay big karma points for this
        request instanceof SortingRequest)
            return request;
        List<Description> leaves = findLeaves(request);
        Collections.sort(leaves, fHistory.testComparator());
        return constructLeafRequest(leaves);
    }
    // Wraps each leaf description in its own runner and bundles them into one suite request.
    private Request constructLeafRequest(List<Description> leaves) {
        final List<Runner> runners = new ArrayList<Runner>(leaves.size());
        for (Description each : leaves) runners.add(buildRunner(each));
        return new Request() {
            @Override
            public Runner getRunner() {
                try {
                    return new Suite((Class<?>) null, runners) {
                    };
                } catch (InitializationError e) {
                    return new ErrorReportingRunner(null, e);
                }
            }
        };
    }
    // Builds a runner for a single leaf description; malformed JUnit 3 classes are re-run whole.
    private Runner buildRunner(Description each) {
        if (each.toString().equals("TestSuite with 0 tests"))
            return Suite.emptySuite();
        if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
            // thrown away which method the warning is for.
            return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
        Class<?> type = each.getTestClass();
        if (type == null)
            throw new RuntimeException("Can't build a runner from description [" + each + "]");
        String methodName = each.getMethodName();
        if (methodName == null)
            return Request.aClass(type).getRunner();
        return Request.method(type, methodName).getRunner();
    }
    // Recovers the Class behind a "malformed JUnit 3" marker description, or null if unloadable.
    private Class<?> getMalformedTestClass(Description each) {
        try {
            return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
        } catch (ClassNotFoundException e) {
            return null;
        }
    }
    /**
     * @param request a request to run
     * @return a list of method-level tests to run, sorted in the order
     * specified in the class comment.
     */
    public List<Description> sortedLeavesForTest(Request request) {
        return findLeaves(sortRequest(request));
    }
    private List<Description> findLeaves(Request request) {
        List<Description> results = new ArrayList<Description>();
        // The cast picks the Description-parent overload: a bare null matches both
        // findLeaves overloads (Description and Plan are unrelated types), which is a
        // compile-time ambiguity (JLS 15.12.2).
        findLeaves((Description) null, request.getRunner().getPlan(), results);
        return results;
    }
    // Depth-first collection of leaf descriptions; parent is the enclosing plan's description.
    private void findLeaves(Description parent, Plan plan, List<Description> results) {
        Description description = plan.getDescription();
        if (plan.getChildren().isEmpty())
            if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
            else
                results.add(description);
        else
            for (Plan each : plan.getChildren()) findLeaves(description, each, results);
    }
    // Legacy overload retained from the other merge side; parent is the enclosing plan itself.
    private void findLeaves(Plan parent, Plan plan, List<Description> results) {
        if (plan.getChildren().isEmpty())
            if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
                results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
            else
                results.add(plan.getDescription());
        else
            for (Plan each : plan.getChildren()) findLeaves(plan, each, results);
    }
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
public class MaxCore {
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX = "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (// We'll pay big karma points for this
request instanceof SortingRequest)
return request;
List<Description> leaves = findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves) runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>) null, runners) {
};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
// Maps a single leaf Description back to an executable Runner.
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class to get the warning
// for this method, but we can't do better, because JUnit 3.8 has
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type = each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName = each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
// Recovers the Class whose name was embedded in a "malformed JUnit 3"
// marker description; returns null when the class cannot be resolved.
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
// Class no longer resolvable (e.g. renamed since recorded) — caller gets null.
return null;
}
}
/**
* Computes the sorted execution order without actually running anything.
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
// Collects all method-level (leaf) descriptions from the request's plan.
// NOTE(review): with both findLeaves(Description,...) and findLeaves(Plan,...)
// overloads declared below, the literal null argument here is ambiguous to
// the Java compiler (JLS 15.12.2) -- the null needs a cast, or one overload
// must be removed.
private List<Description> findLeaves(Request request) {
List<Description> results = new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
// Depth-first walk: adds each childless plan's description to results,
// substituting a "malformed JUnit 3" marker (carrying the parent) for
// JUnit 3.8's synthetic warning(junit.framework.TestSuite$1) entry so the
// originating class can still be identified.
private void findLeaves(Description parent, Plan plan, List<Description> results) {
Description description = plan.getDescription();
if (plan.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
else
results.add(description);
else
for (Plan each : plan.getChildren()) findLeaves(description, each, results);
}
// Overload variant that threads the parent Plan instead of its Description.
// NOTE(review): unlike the Description-parent walk, this one dereferences
// parent (parent.getDescription()), so a null parent -- as passed by
// findLeaves(Request) -- would throw NullPointerException here.
private void findLeaves(Plan parent, Plan plan, List<Description> results) {
if (plan.getChildren().isEmpty())
if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
else
results.add(plan.getDescription());
else
for (Plan each : plan.getChildren()) findLeaves(plan, each, results);
}
}
Unstructured
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
public class MaxCore {
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
<<<<<<< MINE
private void findLeaves(Description parent, Plan plan, List<Description> results) {
Description description = plan.getDescription();
if (plan.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
=======
private void findLeaves(Plan parent, Plan plan, List<Description> results) {
if (plan.getChildren().isEmpty())
if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
>>>>>>> YOURS
else
results.add(plan.getDescription());
else
for (Plan each : plan.getChildren())
<<<<<<< MINE
findLeaves(description, each, results);
=======
findLeaves(plan, each, results);
>>>>>>> YOURS
}
}
package org.junit.experimental.max;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import junit.framework.TestSuite;
import org.junit.internal.requests.SortingRequest;
import org.junit.internal.runners.ErrorReportingRunner;
import org.junit.internal.runners.JUnit38ClassRunner;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Plan;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.Runner;
import org.junit.runners.Suite;
import org.junit.runners.model.InitializationError;
/**
* A replacement for JUnitCore, which keeps track of runtime and failure history, and reorders tests
* to maximize the chances that a failing test occurs early in the test run.
*
* The rules for sorting are:
* <ol>
* <li> Never-run tests first, in arbitrary order
* <li> Group remaining tests by the date at which they most recently failed.
* <li> Sort groups such that the most recent failure date is first, and never-failing tests are at the end.
* <li> Within a group, run the fastest tests first.
* </ol>
*/
public class MaxCore {
private static final String MALFORMED_JUNIT_3_TEST_CLASS_PREFIX= "malformed JUnit 3 test class: ";
/**
* Create a new MaxCore from a serialized file stored at storedResults
* @deprecated use storedLocally()
*/
@Deprecated
public static MaxCore forFolder(String folderName) {
return storedLocally(new File(folderName));
}
/**
* Create a new MaxCore from a serialized file stored at storedResults
*/
public static MaxCore storedLocally(File storedResults) {
return new MaxCore(storedResults);
}
private final MaxHistory fHistory;
private MaxCore(File storedResults) {
fHistory = MaxHistory.forFolder(storedResults);
}
/**
* Run all the tests in <code>class</code>.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Class<?> testClass) {
return run(Request.aClass(testClass));
}
/**
* Run all the tests contained in <code>request</code>.
* @param request the request describing tests
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request) {
return run(request, new JUnitCore());
}
/**
* Run all the tests contained in <code>request</code>.
*
* This variant should be used if {@code core} has attached listeners that this
* run should notify.
*
* @param request the request describing tests
* @param core a JUnitCore to delegate to.
* @return a {@link Result} describing the details of the test run and the failed tests.
*/
public Result run(Request request, JUnitCore core) {
core.addListener(fHistory.listener());
return core.run(sortRequest(request).getRunner());
}
/**
* @param request
* @return a new Request, which contains all of the same tests, but in a new order.
*/
public Request sortRequest(Request request) {
if (request instanceof SortingRequest) // We'll pay big karma points for this
return request;
List<Description> leaves= findLeaves(request);
Collections.sort(leaves, fHistory.testComparator());
return constructLeafRequest(leaves);
}
private Request constructLeafRequest(List<Description> leaves) {
final List<Runner> runners = new ArrayList<Runner>();
for (Description each : leaves)
runners.add(buildRunner(each));
return new Request() {
@Override
public Runner getRunner() {
try {
return new Suite((Class<?>)null, runners) {};
} catch (InitializationError e) {
return new ErrorReportingRunner(null, e);
}
}
};
}
private Runner buildRunner(Description each) {
if (each.toString().equals("TestSuite with 0 tests"))
return Suite.emptySuite();
if (each.toString().startsWith(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX))
// This is cheating, because it runs the whole class
// to get the warning for this method, but we can't do better,
// because JUnit 3.8's
// thrown away which method the warning is for.
return new JUnit38ClassRunner(new TestSuite(getMalformedTestClass(each)));
Class<?> type= each.getTestClass();
if (type == null)
throw new RuntimeException("Can't build a runner from description [" + each + "]");
String methodName= each.getMethodName();
if (methodName == null)
return Request.aClass(type).getRunner();
return Request.method(type, methodName).getRunner();
}
private Class<?> getMalformedTestClass(Description each) {
try {
return Class.forName(each.toString().replace(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX, ""));
} catch (ClassNotFoundException e) {
return null;
}
}
/**
* @param request a request to run
* @return a list of method-level tests to run, sorted in the order
* specified in the class comment.
*/
public List<Description> sortedLeavesForTest(Request request) {
return findLeaves(sortRequest(request));
}
private List<Description> findLeaves(Request request) {
List<Description> results= new ArrayList<Description>();
findLeaves(null, request.getRunner().getPlan(), results);
return results;
}
<<<<<<< MINE
private void findLeaves(Description parent, Plan plan, List<Description> results) {
Description description = plan.getDescription();
if (plan.getChildren().isEmpty())
if (description.toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent));
=======
private void findLeaves(Plan parent, Plan plan, List<Description> results) {
if (plan.getChildren().isEmpty())
if (plan.getDescription().toString().equals("warning(junit.framework.TestSuite$1)"))
results.add(Description.createSuiteDescription(MALFORMED_JUNIT_3_TEST_CLASS_PREFIX + parent.getDescription()));
>>>>>>> YOURS
else
results.add(plan.getDescription());
else
for (Plan each : plan.getChildren())
<<<<<<< MINE
findLeaves(description, each, results);
=======
findLeaves(plan, each, results);
>>>>>>> YOURS
}
}
Diff Result
No diff
Case 11 - lucenesolr.rev_a90cb_776df.SegmentWriteState.java
public SegmentWriteState
Left modified signature and body
Right modified signature and body
Unstructured reported conflict on signature parameters and merged bodies
Safe kept both methods
MergeMethods reported conflict including whole method
KeepBothMethods kept both versions
Base
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import org.apache.lucene.store.Directory;
/**
* Groups the per-segment values (name, doc count, codec configuration)
* used while writing a single segment.
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
// Mutable flag; never written inside this class -- set by callers.
// (Name suggests term vectors; confirm at call sites.)
public boolean hasVectors;
final SegmentCodecs segmentCodecs;
// "" for the primary state; set per-codec via the copy constructor below.
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
// Primary constructor; initializes codecId to the empty string.
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
/**
* Create a shallow {@link SegmentWriteState} copy, differing only in the
* given codec ID.
* NOTE(review): hasVectors is not carried over by this copy constructor.
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
this.codecId = codecId;
}
}
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import org.apache.lucene.store.Directory;
/**
* Groups the per-segment values (name, doc count, codec configuration)
* used while writing a single segment.
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
// Mutable flag; never written inside this class -- set by callers.
// (Name suggests term vectors; confirm at call sites.)
public boolean hasVectors;
final SegmentCodecs segmentCodecs;
// "" for the primary state; set per-codec via the copy constructor below.
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
// Primary constructor; initializes codecId to the empty string.
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
/**
* Create a shallow {@link SegmentWriteState} copy, differing only in the
* given codec ID.
* NOTE(review): hasVectors is not carried over by this copy constructor.
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
this.codecId = codecId;
}
}
Left
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* Groups the per-segment values (name, doc count, codec configuration,
* pending deletes) used while writing a single segment.
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
// Mutable flag; never written inside this class -- set by callers.
// (Name suggests term vectors; confirm at call sites.)
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
final SegmentCodecs segmentCodecs;
// "" for the primary state; set per-codec via the copy constructor below.
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
// Primary constructor; initializes codecId to the empty string.
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
/**
* Create a shallow {@link SegmentWriteState} copy, differing only in the
* given codec ID.
* NOTE(review): hasVectors and deletedDocs are not carried over by this
* copy constructor; segDeletes is shared, not copied.
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
this.codecId = codecId;
segDeletes = state.segDeletes;
}
}
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* Groups the per-segment values (name, doc count, codec configuration,
* pending deletes) used while writing a single segment.
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
// Mutable flag; never written inside this class -- set by callers.
// (Name suggests term vectors; confirm at call sites.)
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
final SegmentCodecs segmentCodecs;
// "" for the primary state; set per-codec via the copy constructor below.
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
// Primary constructor; initializes codecId to the empty string.
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
/**
* Create a shallow {@link SegmentWriteState} copy, differing only in the
* given codec ID.
* NOTE(review): hasVectors and deletedDocs are not carried over by this
* copy constructor; segDeletes is shared, not copied.
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
this.codecId = codecId;
segDeletes = state.segDeletes;
}
}
Right
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
/**
* Groups the per-segment values (name, doc count, codec configuration,
* flushed-file tracking, memory accounting) used while writing a single segment.
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
// Mutable flag; never written inside this class -- set by callers.
// (Name suggests term vectors; confirm at call sites.)
public boolean hasVectors;
// Files written so far for this segment; the copy constructor shares this
// collection (does not copy it), so all per-codec states accumulate here.
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
// "" for the primary state; set per-codec via the copy constructor below.
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
// Primary constructor; starts with an empty flushedFiles set and "" codecId.
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy, differing only in the
* given codec ID.
* NOTE(review): hasVectors is not carried over; flushedFiles and bytesUsed
* are shared with the source state, not copied.
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
bytesUsed = state.bytesUsed;
}
}
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
/**
* Groups the per-segment values (name, doc count, codec configuration,
* flushed-file tracking, memory accounting) used while writing a single segment.
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
// Mutable flag; never written inside this class -- set by callers.
// (Name suggests term vectors; confirm at call sites.)
public boolean hasVectors;
// Files written so far for this segment; the copy constructor shares this
// collection (does not copy it), so all per-codec states accumulate here.
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
// "" for the primary state; set per-codec via the copy constructor below.
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
// Primary constructor; starts with an empty flushedFiles set and "" codecId.
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy, differing only in the
* given codec ID.
* NOTE(review): hasVectors is not carried over; flushedFiles and bytesUsed
* are shared with the source state, not copied.
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
bytesUsed = state.bytesUsed;
}
}
MergeMethods
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval;
// TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
<<<<<<< MINE
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
=======
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
>>>>>>> YOURS
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval;
// TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
<<<<<<< MINE
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
=======
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
>>>>>>> YOURS
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
KeepBothMethods
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval;
// TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval;
// TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
Safe
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval;
// TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval;
// TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
codecId = "";
}
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
this.infoStream = infoStream;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
Unstructured
package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
<<<<<<< MINE
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
=======
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
>>>>>>> YOURS
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
<<<<<<< MINE
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
=======
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
>>>>>>> YOURS
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.PrintStream;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitVector;
/**
* @lucene.experimental
*/
public class SegmentWriteState {
public final PrintStream infoStream;
public final Directory directory;
public final String segmentName;
public final FieldInfos fieldInfos;
public final int numDocs;
public boolean hasVectors;
<<<<<<< MINE
// Deletes to apply while we are flushing the segment. A
// Term is enrolled in here if it was deleted at one
// point, and it's mapped to the docIDUpto, meaning any
// docID < docIDUpto containing this term should be
// deleted.
public final BufferedDeletes segDeletes;
// Lazily created:
public BitVector deletedDocs;
=======
public final Collection<String> flushedFiles;
public final AtomicLong bytesUsed;
>>>>>>> YOURS
final SegmentCodecs segmentCodecs;
public final String codecId;
/** Expert: The fraction of terms in the "dictionary" which should be stored
* in RAM. Smaller values use more memory, but make searching slightly
* faster, while larger values use less memory and make searching slightly
* slower. Searching is typically not dominated by dictionary lookup, so
* tweaking this is rarely useful.*/
public int termIndexInterval; // TODO: this should be private to the codec, not settable here or in IWC
/** Expert: The fraction of TermDocs entries stored in skip tables,
* used to accelerate {@link DocsEnum#advance(int)}. Larger values result in
* smaller indexes, greater acceleration, but fewer accelerable cases, while
* smaller values result in bigger indexes, less acceleration and more
* accelerable cases. More detailed experiments would be useful here. */
public final int skipInterval = 16;
/** Expert: The maximum number of skip levels. Smaller values result in
* slightly smaller indexes, but slower skipping in big posting lists.
*/
public final int maxSkipLevels = 10;
public SegmentWriteState(PrintStream infoStream, Directory directory, String segmentName, FieldInfos fieldInfos,
<<<<<<< MINE
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, BufferedDeletes segDeletes) {
=======
int numDocs, int termIndexInterval, SegmentCodecs segmentCodecs, AtomicLong bytesUsed) {
>>>>>>> YOURS
this.infoStream = infoStream;
this.segDeletes = segDeletes;
this.directory = directory;
this.segmentName = segmentName;
this.fieldInfos = fieldInfos;
this.numDocs = numDocs;
this.termIndexInterval = termIndexInterval;
this.segmentCodecs = segmentCodecs;
flushedFiles = new HashSet<String>();
codecId = "";
this.bytesUsed = bytesUsed;
}
/**
* Create a shallow {@link SegmentWriteState} copy final a codec ID
*/
SegmentWriteState(SegmentWriteState state, String codecId) {
infoStream = state.infoStream;
directory = state.directory;
segmentName = state.segmentName;
fieldInfos = state.fieldInfos;
numDocs = state.numDocs;
termIndexInterval = state.termIndexInterval;
segmentCodecs = state.segmentCodecs;
flushedFiles = state.flushedFiles;
this.codecId = codecId;
<<<<<<< MINE
segDeletes = state.segDeletes;
=======
bytesUsed = state.bytesUsed;
>>>>>>> YOURS
}
}
Diff Result
No diff
Case 12 - ogplatform.rev_1550f_57348.TimeSeriesSearchRequest.java
void setIdentifiers
Left modified signature parameter type: Collection → List
Right modified signature parameter type: Collection → Set
Unstructured reported a conflict only between the signatures
Safe reported conflict between whole methods
MergeMethods reported conflict between whole methods
KeepBothMethods kept both versions of the method
Base
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The timeseries identifier for loading specific data points range
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* Identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
*/
@PropertyDefinition
private String _identifierValue;
/**
* List of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
*/
@PropertyDefinition
private final List<Identifier> _identifiers = new ArrayList<Identifier>();
/**
* The dataSource, null to search all dataSource.
*/
@PropertyDefinition
private String _dataSource;
/**
* The dataProvider, null to search all dataProvider.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The dataField, null to search all dataField.
*/
@PropertyDefinition
private String _dataField;
/**
* The observationTime, null to search all observationTime
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in datastore.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search till end date in datastore.
*/
@PropertyDefinition
private T _end;
/**
* Set to true if to load datapoints, otherwise return just meta data
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true if to load the start and end date for timeseries
*/
@PropertyDefinition
private boolean _loadDates;
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((List<Identifier>) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((boolean) (Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((boolean) (Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the timeseries identifier for loading specific data points range
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the timeseries identifier for loading specific data points range
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @return the value of the property
*/
public List<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @param identifiers the new value of the property
*/
public void setIdentifiers(Collection<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* - no wildcards are allowed
* @return the property, not null
*/
public final Property<List<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataSource, null to search all dataSource.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the dataSource, null to search all dataSource.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataProvider, null to search all dataProvider.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the dataProvider, null to search all dataProvider.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataField, null to search all dataField.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the dataField, null to search all dataField.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observationTime, null to search all observationTime
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observationTime, null to search all observationTime
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in datastore.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in datastore.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search till end date in datastore.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search till end date in datastore.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true if to load datapoints, otherwise return just meta data
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true if to load datapoints, otherwise return just meta data
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true if to load the start and end date for timeseries
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true if to load the start and end date for timeseries
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("unchecked")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings("unchecked")
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings("unchecked")
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<List<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The timeseries identifier for loading specific data points range
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* Identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
*/
@PropertyDefinition
private String _identifierValue;
/**
* List of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
*/
@PropertyDefinition
private final List<Identifier> _identifiers = new ArrayList<Identifier>();
/**
* The dataSource, null to search all dataSource.
*/
@PropertyDefinition
private String _dataSource;
/**
* The dataProvider, null to search all dataProvider.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The dataField, null to search all dataField.
*/
@PropertyDefinition
private String _dataField;
/**
* The observationTime, null to search all observationTime
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in datastore.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search till end date in datastore.
*/
@PropertyDefinition
private T _end;
/**
* Set to true if to load datapoints, otherwise return just meta data
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true if to load the start and end date for timeseries
*/
@PropertyDefinition
private boolean _loadDates;
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((List<Identifier>) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((boolean) (Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((boolean) (Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the timeseries identifier for loading specific data points range
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the timeseries identifier for loading specific data points range
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @return the value of the property
*/
public List<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @param identifiers the new value of the property
*/
public void setIdentifiers(Collection<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* - no wildcards are allowed
* @return the property, not null
*/
public final Property<List<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataSource, null to search all dataSource.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the dataSource, null to search all dataSource.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from the start date in the data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from the start date in the data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in the data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in the data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets whether to load the data points; when false, only meta data is returned.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets whether to load the data points; when false, only meta data is returned.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets whether to load the start and end date for the time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets whether to load the start and end date for the time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
* Joda-Beans generated: declares one meta-property per bean property and an
* unmodifiable name-to-property map used for reflective access.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
// Raw type is deliberate in the generated code: a single instance is shared across all T.
@SuppressWarnings("unchecked")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
// Object.class is used because the generic type T is erased at runtime.
@SuppressWarnings("unchecked")
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings("unchecked")
protected Meta() {
// LinkedHashMap keeps the properties in declaration order; wrapped unmodifiable below.
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings("unchecked")
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<List<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
Left
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final List<Identifier> _identifiers = new ArrayList<Identifier>();
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
// Joda-Beans generated dispatch: switches on the precomputed String.hashCode() of the
// property name and routes to the matching getter; unknown names fall through to super.
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
// Joda-Beans generated dispatch: mirrors propertyGet, casting the incoming value to the
// property type. The unchecked casts (e.g. to T or List<Identifier>) assume the caller
// supplies a value of the correct runtime type — ClassCastException otherwise.
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((List<Identifier>) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public List<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* NOTE(review): despite the "null to not match" wording above, {@code addAll} throws
* NullPointerException when passed null — confirm whether null should be tolerated here.
* @param identifiers the new value of the property
*/
public void setIdentifiers(List<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<List<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<List<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final List<Identifier> _identifiers = new ArrayList<Identifier>();
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
/**
* Gets a property value by name.
* <p>
* Generated code: dispatches on the precomputed {@code String.hashCode()} of
* each property name (the integer case labels below are those hashes, with the
* property name kept in a trailing comment). Unknown names fall through to the
* superclass.
* @param propertyName  the name of the property to read
* @return the current value of the named property
*/
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
/**
* Sets a property value by name.
* <p>
* Generated code: dispatches on the precomputed {@code String.hashCode()} of
* each property name, mirroring {@code propertyGet}. The unchecked casts from
* {@code Object} to the property types are expected for generated bean code
* (hence the class-wide {@code @SuppressWarnings("unchecked")} here); a wrong
* runtime type surfaces as a {@link ClassCastException}. Unknown names fall
* through to the superclass.
* @param propertyName  the name of the property to write
* @param newValue  the new value, cast to the property's declared type
*/
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((List<Identifier>) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public List<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property, null treated as an empty list
*/
public void setIdentifiers(List<Identifier> identifiers) {
// The field docs allow a null bundle, but clear()+addAll(null) threw NPE.
// Treat null as clearing the restriction list. NOTE(review): the backing
// field is a final List, so a true "null bundle" cannot be represented here;
// an empty list results — confirm callers distinguish null vs empty upstream.
this._identifiers.clear();
if (identifiers != null) {
    this._identifiers.addAll(identifiers);
}
}
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<List<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<List<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
Right version:
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier, for loading a specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* Identifier value, matched against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to search for. Unlike {@code _identifierValue}, each
* identifier requires an exact match - no wildcards are allowed.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if applicable to the identifiers).
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from the start date in the data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in the data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise only meta data is returned.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for the time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((boolean) (Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((boolean) (Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the timeseries identifier for loading specific data points range
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the timeseries identifier for loading specific data points range
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to search for. Unlike {@code identifierValue}, each
* identifier requires an exact match - no wildcards are allowed.
* @param identifiers the new value of the property, null treated as an empty set
*/
public void setIdentifiers(Set<Identifier> identifiers) {
// Guard against NPE: clear()+addAll(null) threw NullPointerException, while
// the sibling search fields in this class document null as "search all".
// Treat null as clearing the identifier restriction.
this._identifiers.clear();
if (identifiers != null) {
    this._identifiers.addAll(identifiers);
}
}
/**
* Gets the the {@code identifiers} property.
* - no wildcards are allowed
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataSource, null to search all dataSource.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the dataSource, null to search all dataSource.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataProvider, null to search all dataProvider.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the dataProvider, null to search all dataProvider.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataField, null to search all dataField.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the dataField, null to search all dataField.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observationTime, null to search all observationTime
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observationTime, null to search all observationTime
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in datastore.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in datastore.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search till end date in datastore.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search till end date in datastore.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true if to load datapoints, otherwise return just meta data
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true if to load datapoints, otherwise return just meta data
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true if to load the start and end date for timeseries
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true if to load the start and end date for timeseries
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("unchecked")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings("unchecked")
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings("unchecked")
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The timeseries identifier for loading specific data points range
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* Identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
*/
@PropertyDefinition
private String _identifierValue;
/**
* List of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The dataSource, null to search all dataSource.
*/
@PropertyDefinition
private String _dataSource;
/**
* The dataProvider, null to search all dataProvider.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The dataField, null to search all dataField.
*/
@PropertyDefinition
private String _dataField;
/**
* The observationTime, null to search all observationTime
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in datastore.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search till end date in datastore.
*/
@PropertyDefinition
private T _end;
/**
* Set to true if to load datapoints, otherwise return just meta data
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true if to load the start and end date for timeseries
*/
@PropertyDefinition
private boolean _loadDates;
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((boolean) (Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((boolean) (Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the timeseries identifier for loading specific data points range
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the timeseries identifier for loading specific data points range
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets identifier value, will match against the <b>value</b> of the identifiers
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* (see Identifier.getValue());
* wildcards allowed;
* will not match on the <b>key</b> of any of the identifiers;
* null to search all identifiers
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @param identifiers the new value of the property
*/
public void setIdentifiers(Set<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* - no wildcards are allowed
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataSource, null to search all dataSource.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the dataSource, null to search all dataSource.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataProvider, null to search all dataProvider.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the dataProvider, null to search all dataProvider.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the dataField, null to search all dataField.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the dataField, null to search all dataField.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observationTime, null to search all observationTime
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observationTime, null to search all observationTime
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in datastore.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in datastore.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search till end date in datastore.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search till end date in datastore.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true if to load datapoints, otherwise return just meta data
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true if to load datapoints, otherwise return just meta data
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true if to load the start and end date for timeseries
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true if to load the start and end date for timeseries
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("unchecked")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings("unchecked")
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings("unchecked")
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings("unchecked")
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
MergeMethods
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest <T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
/**
 * Reads a property value by name.
 * <p>
 * Generated code: dispatches on the precomputed hash code of the property
 * name (the numeric case labels are {@code String.hashCode()} values of the
 * names shown in the trailing comments) and delegates to the matching getter.
 * Unknown names fall through to the superclass, which raises the standard
 * no-such-property error.
 */
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
// Not a declared property of this bean - let DirectBean handle/reject it.
return super.propertyGet(propertyName);
}
/**
 * Writes a property value by name.
 * <p>
 * Generated code: dispatches on the precomputed hash code of the property
 * name and delegates to the matching setter, casting {@code newValue} to the
 * property's declared type (hence the unchecked suppression for the generic
 * {@code Set<Identifier>} and {@code T} casts). Unknown names fall through to
 * the superclass, which raises the standard no-such-property error.
 */
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
// Not a declared property of this bean - let DirectBean handle/reject it.
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
<<<<<<< MINE
public void setIdentifiers(List<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
=======
public void setIdentifiers(Set<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
>>>>>>> YOURS
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta <T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest <T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
<<<<<<< MINE
public void setIdentifiers(List<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
=======
public void setIdentifiers(Set<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
>>>>>>> YOURS
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta <T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
}
KeepBothMethods
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest <T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
/**
 * Dispatches a by-name property read to the matching getter.
 * The switch keys are the pre-computed {@code String.hashCode()} values of the
 * property names (shown in the trailing comments); names not matched here fall
 * through to the superclass implementation.
 * @param propertyName  the name of the property to read
 * @return the current value of that property
 */
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
// unknown property: delegate to DirectBean, which raises the standard error
return super.propertyGet(propertyName);
}
/**
 * Dispatches a by-name property write to the matching setter.
 * The switch keys are the pre-computed {@code String.hashCode()} values of the
 * property names (shown in the trailing comments); names not matched here fall
 * through to the superclass implementation. The unchecked suppression covers the
 * casts of {@code newValue} to the generic types {@code T} and
 * {@code Set<Identifier>}, which cannot be verified at runtime due to erasure.
 * @param propertyName  the name of the property to set
 * @param newValue  the new value; must be assignable to the property's type
 */
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
// unknown property: delegate to DirectBean, which raises the standard error
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
 * Sets the identifiers to match, null to not match on identifiers.
 * This will return time series where at least one complete identifier in the series matches
 * at least one complete identifier in this bundle. Note that an empty bundle will not match
 * anything, whereas a null bundle places no restrictions on the result.
 * This method is suitable for exact machine searching, whereas the {@code identifierValue}
 * search is useful for human searching.
 * NOTE(review): despite the "null to not match" wording, passing null currently
 * throws NullPointerException after clearing the set — TODO confirm intended contract.
 * @param identifiers the new value of the property
 */
public void setIdentifiers(List<Identifier> identifiers) {
replaceIdentifiers(identifiers);
}
/**
 * Sets list of Identifiers to search. Unlike _identifierValue, requires exact match
 * - no wildcards are allowed
 * @param identifiers the new value of the property
 */
public void setIdentifiers(Set<Identifier> identifiers) {
replaceIdentifiers(identifiers);
}
/**
 * Replaces the contents of the internal identifier set with the given collection.
 * Shared implementation for both {@code setIdentifiers} overloads, removing the
 * previously duplicated bodies.
 * @param identifiers the new identifiers; null throws NullPointerException
 *  (after the existing contents have been cleared)
 */
private void replaceIdentifiers(Collection<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta <T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest <T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
public void setIdentifiers(List<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Sets list of Identifiers to search. Unlike _identifierValue, requires exact match
* - no wildcards are allowed
* @param identifiers the new value of the property
*/
public void setIdentifiers(Set<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta <T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
}
Safe
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest <T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
<<<<<<< MINE
public void setIdentifiers(List<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
=======
public void setIdentifiers(Set<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
>>>>>>> YOURS
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if applicable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if applicable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta <T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest <T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if applicable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
<<<<<<< MINE
public void setIdentifiers(List<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
=======
public void setIdentifiers(Set<Identifier> identifiers) {
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
>>>>>>> YOURS
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if applicable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if applicable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta <T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
}
Unstructured
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
// Conflict resolved: the merged class uses Set<Identifier> throughout, so the
// ArrayList import from the other branch would be unused (and List is never
// imported, so the List-based branch could not compile anyway).
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
/**
 * Reads a property value by name for the Joda-Beans framework.
 * <p>
 * Dispatches on the precomputed hash code of the property name; the trailing
 * comment on each case records the matching property. Unknown names fall
 * through to the superclass, which reports the error.
 * <p>
 * NOTE: this method sits inside the AUTOGENERATED block (see the
 * "AUTOGENERATED START" marker above) — regenerate rather than hand-editing.
 */
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
/**
 * Writes a property value by name for the Joda-Beans framework.
 * <p>
 * Mirrors {@code propertyGet}: dispatches on the hash code of the property
 * name and casts the new value to the setter's declared type (hence the
 * unchecked suppression for the generic {@code T} and {@code Set} casts).
 * Unknown names fall through to the superclass.
 * <p>
 * NOTE: this method sits inside the AUTOGENERATED block — regenerate rather
 * than hand-editing.
 */
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
<<<<<<< MINE
public void setIdentifiers(List<Identifier> identifiers) {
=======
public void setIdentifiers(Set<Identifier> identifiers) {
>>>>>>> YOURS
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets current date (if appicalable for identifiers)
* @return the value of the property
*/
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
* Sets current date (if appicalable for identifiers)
* @param currentDate the new value of the property
*/
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
/**
* Copyright (C) 2009 - 2010 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.timeseries;
// Conflict resolved: this copy of the class also declares the identifiers
// field as Set<Identifier> (HashSet), so the ArrayList import from the other
// branch would be unused; List itself is never imported here.
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import javax.time.calendar.LocalDate;
import org.joda.beans.BeanDefinition;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.BasicMetaBean;
import org.joda.beans.impl.direct.DirectBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import com.opengamma.id.Identifier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.util.db.PagingRequest;
/**
* Request for searching for TimeSeries.
*
* @param <T> LocalDate/java.sql.Date
*/
@BeanDefinition
public class TimeSeriesSearchRequest<T> extends DirectBean {
/**
* The request for paging.
* By default all matching items will be returned.
*/
@PropertyDefinition
private PagingRequest _pagingRequest = PagingRequest.ALL;
/**
* The time series identifier for loading specific data points range.
*/
@PropertyDefinition
private UniqueIdentifier _timeSeriesId;
/**
* The identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
*/
@PropertyDefinition
private String _identifierValue;
/**
* The identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
*/
@PropertyDefinition
private final Set<Identifier> _identifiers = new HashSet<Identifier>();
/**
* Current date (if appicalable for identifiers)
*/
@PropertyDefinition
private LocalDate _currentDate;
/**
* The data source, null to search all data sources.
*/
@PropertyDefinition
private String _dataSource;
/**
* The data provider, null to search all data providers.
*/
@PropertyDefinition
private String _dataProvider;
/**
* The data field to search, null to search all data fields.
*/
@PropertyDefinition
private String _dataField;
/**
* The observation time, null to search all observation times.
*/
@PropertyDefinition
private String _observationTime;
/**
* The start date, null to search from start date in data store.
*/
@PropertyDefinition
private T _start;
/**
* The end date, null to search until the end date in data store.
*/
@PropertyDefinition
private T _end;
/**
* Set to true to load data points, otherwise return just meta data.
*/
@PropertyDefinition
private boolean _loadTimeSeries;
/**
* Set to true to load the start and end date for time series.
*/
@PropertyDefinition
private boolean _loadDates;
/**
* Creates an instance.
*/
public TimeSeriesSearchRequest() {
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code TimeSeriesSearchRequest<T>}.
* @param <R> the bean's generic type
* @return the meta-bean, not null
*/
@SuppressWarnings("unchecked")
public static <R> TimeSeriesSearchRequest.Meta<R> meta() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@SuppressWarnings("unchecked")
@Override
public TimeSeriesSearchRequest.Meta<T> metaBean() {
return TimeSeriesSearchRequest.Meta.INSTANCE;
}
@Override
protected Object propertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
return getPagingRequest();
case 1709694943: // timeSeriesId
return getTimeSeriesId();
case 2085582408: // identifierValue
return getIdentifierValue();
case 1368189162: // identifiers
return getIdentifiers();
case 600751303: // currentDate
return getCurrentDate();
case 1272470629: // dataSource
return getDataSource();
case 339742651: // dataProvider
return getDataProvider();
case -386794640: // dataField
return getDataField();
case 951232793: // observationTime
return getObservationTime();
case 109757538: // start
return getStart();
case 100571: // end
return getEnd();
case 1833789738: // loadTimeSeries
return isLoadTimeSeries();
case 1364095295: // loadDates
return isLoadDates();
}
return super.propertyGet(propertyName);
}
@SuppressWarnings("unchecked")
@Override
protected void propertySet(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case -2092032669: // pagingRequest
setPagingRequest((PagingRequest) newValue);
return;
case 1709694943: // timeSeriesId
setTimeSeriesId((UniqueIdentifier) newValue);
return;
case 2085582408: // identifierValue
setIdentifierValue((String) newValue);
return;
case 1368189162: // identifiers
setIdentifiers((Set<Identifier>) newValue);
return;
case 600751303: // currentDate
setCurrentDate((LocalDate) newValue);
return;
case 1272470629: // dataSource
setDataSource((String) newValue);
return;
case 339742651: // dataProvider
setDataProvider((String) newValue);
return;
case -386794640: // dataField
setDataField((String) newValue);
return;
case 951232793: // observationTime
setObservationTime((String) newValue);
return;
case 109757538: // start
setStart((T) newValue);
return;
case 100571: // end
setEnd((T) newValue);
return;
case 1833789738: // loadTimeSeries
setLoadTimeSeries((Boolean) newValue);
return;
case 1364095295: // loadDates
setLoadDates((Boolean) newValue);
return;
}
super.propertySet(propertyName, newValue);
}
//-----------------------------------------------------------------------
/**
* Gets the request for paging.
* By default all matching items will be returned.
* @return the value of the property
*/
public PagingRequest getPagingRequest() {
return _pagingRequest;
}
/**
* Sets the request for paging.
* By default all matching items will be returned.
* @param pagingRequest the new value of the property
*/
public void setPagingRequest(PagingRequest pagingRequest) {
this._pagingRequest = pagingRequest;
}
/**
* Gets the the {@code pagingRequest} property.
* By default all matching items will be returned.
* @return the property, not null
*/
public final Property<PagingRequest> pagingRequest() {
return metaBean().pagingRequest().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the time series identifier for loading specific data points range.
* @return the value of the property
*/
public UniqueIdentifier getTimeSeriesId() {
return _timeSeriesId;
}
/**
* Sets the time series identifier for loading specific data points range.
* @param timeSeriesId the new value of the property
*/
public void setTimeSeriesId(UniqueIdentifier timeSeriesId) {
this._timeSeriesId = timeSeriesId;
}
/**
* Gets the the {@code timeSeriesId} property.
* @return the property, not null
*/
public final Property<UniqueIdentifier> timeSeriesId() {
return metaBean().timeSeriesId().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the value of the property
*/
public String getIdentifierValue() {
return _identifierValue;
}
/**
* Sets the identifier value, matching against the <b>value</b> of the identifiers,
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @param identifierValue the new value of the property
*/
public void setIdentifierValue(String identifierValue) {
this._identifierValue = identifierValue;
}
/**
* Gets the the {@code identifierValue} property.
* null to not match by identifier value.
* This matches against the {@link Identifier#getValue() value} of the identifier
* and does not match against the key. Wildcards are allowed.
* This method is suitable for human searching, whereas the {@code identifiers}
* search is useful for exact machine searching.
* @return the property, not null
*/
public final Property<String> identifierValue() {
return metaBean().identifierValue().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the value of the property
*/
public Set<Identifier> getIdentifiers() {
return _identifiers;
}
/**
* Sets the identifiers to match, null to not match on identifiers.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @param identifiers the new value of the property
*/
<<<<<<< MINE
public void setIdentifiers(List<Identifier> identifiers) {
=======
public void setIdentifiers(Set<Identifier> identifiers) {
>>>>>>> YOURS
this._identifiers.clear();
this._identifiers.addAll(identifiers);
}
/**
* Gets the the {@code identifiers} property.
* This will return time series where at least one complete identifier in the series matches
* at least one complete identifier in this bundle. Note that an empty bundle will not match
* anything, whereas a null bundle places no restrictions on the result.
* This method is suitable for exact machine searching, whereas the {@code identifierValue}
* search is useful for human searching.
* @return the property, not null
*/
public final Property<Set<Identifier>> identifiers() {
return metaBean().identifiers().createProperty(this);
}
//-----------------------------------------------------------------------
/**
 * Gets the current date (if applicable for the identifiers).
 * @return the value of the property
 */
public LocalDate getCurrentDate() {
return _currentDate;
}
/**
 * Sets the current date (if applicable for the identifiers).
 * @param currentDate the new value of the property
 */
public void setCurrentDate(LocalDate currentDate) {
this._currentDate = currentDate;
}
/**
* Gets the the {@code currentDate} property.
* @return the property, not null
*/
public final Property<LocalDate> currentDate() {
return metaBean().currentDate().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data source, null to search all data sources.
* @return the value of the property
*/
public String getDataSource() {
return _dataSource;
}
/**
* Sets the data source, null to search all data sources.
* @param dataSource the new value of the property
*/
public void setDataSource(String dataSource) {
this._dataSource = dataSource;
}
/**
* Gets the the {@code dataSource} property.
* @return the property, not null
*/
public final Property<String> dataSource() {
return metaBean().dataSource().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data provider, null to search all data providers.
* @return the value of the property
*/
public String getDataProvider() {
return _dataProvider;
}
/**
* Sets the data provider, null to search all data providers.
* @param dataProvider the new value of the property
*/
public void setDataProvider(String dataProvider) {
this._dataProvider = dataProvider;
}
/**
* Gets the the {@code dataProvider} property.
* @return the property, not null
*/
public final Property<String> dataProvider() {
return metaBean().dataProvider().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the data field to search, null to search all data fields.
* @return the value of the property
*/
public String getDataField() {
return _dataField;
}
/**
* Sets the data field to search, null to search all data fields.
* @param dataField the new value of the property
*/
public void setDataField(String dataField) {
this._dataField = dataField;
}
/**
* Gets the the {@code dataField} property.
* @return the property, not null
*/
public final Property<String> dataField() {
return metaBean().dataField().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the observation time, null to search all observation times.
* @return the value of the property
*/
public String getObservationTime() {
return _observationTime;
}
/**
* Sets the observation time, null to search all observation times.
* @param observationTime the new value of the property
*/
public void setObservationTime(String observationTime) {
this._observationTime = observationTime;
}
/**
* Gets the the {@code observationTime} property.
* @return the property, not null
*/
public final Property<String> observationTime() {
return metaBean().observationTime().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the start date, null to search from start date in data store.
* @return the value of the property
*/
public T getStart() {
return _start;
}
/**
* Sets the start date, null to search from start date in data store.
* @param start the new value of the property
*/
public void setStart(T start) {
this._start = start;
}
/**
* Gets the the {@code start} property.
* @return the property, not null
*/
public final Property<T> start() {
return metaBean().start().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets the end date, null to search until the end date in data store.
* @return the value of the property
*/
public T getEnd() {
return _end;
}
/**
* Sets the end date, null to search until the end date in data store.
* @param end the new value of the property
*/
public void setEnd(T end) {
this._end = end;
}
/**
* Gets the the {@code end} property.
* @return the property, not null
*/
public final Property<T> end() {
return metaBean().end().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load data points, otherwise return just meta data.
* @return the value of the property
*/
public boolean isLoadTimeSeries() {
return _loadTimeSeries;
}
/**
* Sets set to true to load data points, otherwise return just meta data.
* @param loadTimeSeries the new value of the property
*/
public void setLoadTimeSeries(boolean loadTimeSeries) {
this._loadTimeSeries = loadTimeSeries;
}
/**
* Gets the the {@code loadTimeSeries} property.
* @return the property, not null
*/
public final Property<Boolean> loadTimeSeries() {
return metaBean().loadTimeSeries().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* Gets set to true to load the start and end date for time series.
* @return the value of the property
*/
public boolean isLoadDates() {
return _loadDates;
}
/**
* Sets set to true to load the start and end date for time series.
* @param loadDates the new value of the property
*/
public void setLoadDates(boolean loadDates) {
this._loadDates = loadDates;
}
/**
* Gets the the {@code loadDates} property.
* @return the property, not null
*/
public final Property<Boolean> loadDates() {
return metaBean().loadDates().createProperty(this);
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code TimeSeriesSearchRequest}.
*/
public static class Meta<T> extends BasicMetaBean {
/**
* The singleton instance of the meta-bean.
*/
@SuppressWarnings("rawtypes")
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code pagingRequest} property.
*/
private final MetaProperty<PagingRequest> _pagingRequest = DirectMetaProperty.ofReadWrite(this, "pagingRequest", PagingRequest.class);
/**
* The meta-property for the {@code timeSeriesId} property.
*/
private final MetaProperty<UniqueIdentifier> _timeSeriesId = DirectMetaProperty.ofReadWrite(this, "timeSeriesId", UniqueIdentifier.class);
/**
* The meta-property for the {@code identifierValue} property.
*/
private final MetaProperty<String> _identifierValue = DirectMetaProperty.ofReadWrite(this, "identifierValue", String.class);
/**
* The meta-property for the {@code identifiers} property.
*/
<<<<<<< MINE
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<List<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) List.class);
=======
@SuppressWarnings("unchecked")
private final MetaProperty<Set<Identifier>> _identifiers = DirectMetaProperty.ofReadWrite(this, "identifiers", (Class) Set.class);
/**
* The meta-property for the {@code currentDate} property.
*/
private final MetaProperty<LocalDate> _currentDate = DirectMetaProperty.ofReadWrite(this, "currentDate", LocalDate.class);
>>>>>>> YOURS
/**
* The meta-property for the {@code dataSource} property.
*/
private final MetaProperty<String> _dataSource = DirectMetaProperty.ofReadWrite(this, "dataSource", String.class);
/**
* The meta-property for the {@code dataProvider} property.
*/
private final MetaProperty<String> _dataProvider = DirectMetaProperty.ofReadWrite(this, "dataProvider", String.class);
/**
* The meta-property for the {@code dataField} property.
*/
private final MetaProperty<String> _dataField = DirectMetaProperty.ofReadWrite(this, "dataField", String.class);
/**
* The meta-property for the {@code observationTime} property.
*/
private final MetaProperty<String> _observationTime = DirectMetaProperty.ofReadWrite(this, "observationTime", String.class);
/**
* The meta-property for the {@code start} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _start = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "start", Object.class);
/**
* The meta-property for the {@code end} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<T> _end = (DirectMetaProperty) DirectMetaProperty.ofReadWrite(this, "end", Object.class);
/**
* The meta-property for the {@code loadTimeSeries} property.
*/
private final MetaProperty<Boolean> _loadTimeSeries = DirectMetaProperty.ofReadWrite(this, "loadTimeSeries", Boolean.TYPE);
/**
* The meta-property for the {@code loadDates} property.
*/
private final MetaProperty<Boolean> _loadDates = DirectMetaProperty.ofReadWrite(this, "loadDates", Boolean.TYPE);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<Object>> _map;
@SuppressWarnings({"unchecked", "rawtypes" })
protected Meta() {
LinkedHashMap temp = new LinkedHashMap();
temp.put("pagingRequest", _pagingRequest);
temp.put("timeSeriesId", _timeSeriesId);
temp.put("identifierValue", _identifierValue);
temp.put("identifiers", _identifiers);
temp.put("currentDate", _currentDate);
temp.put("dataSource", _dataSource);
temp.put("dataProvider", _dataProvider);
temp.put("dataField", _dataField);
temp.put("observationTime", _observationTime);
temp.put("start", _start);
temp.put("end", _end);
temp.put("loadTimeSeries", _loadTimeSeries);
temp.put("loadDates", _loadDates);
_map = Collections.unmodifiableMap(temp);
}
@Override
public TimeSeriesSearchRequest<T> createBean() {
return new TimeSeriesSearchRequest<T>();
}
@SuppressWarnings({"unchecked", "rawtypes" })
@Override
public Class<? extends TimeSeriesSearchRequest<T>> beanType() {
return (Class) TimeSeriesSearchRequest.class;
}
@Override
public Map<String, MetaProperty<Object>> metaPropertyMap() {
return _map;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code pagingRequest} property.
* @return the meta-property, not null
*/
public final MetaProperty<PagingRequest> pagingRequest() {
return _pagingRequest;
}
/**
* The meta-property for the {@code timeSeriesId} property.
* @return the meta-property, not null
*/
public final MetaProperty<UniqueIdentifier> timeSeriesId() {
return _timeSeriesId;
}
/**
* The meta-property for the {@code identifierValue} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> identifierValue() {
return _identifierValue;
}
/**
* The meta-property for the {@code identifiers} property.
* @return the meta-property, not null
*/
public final MetaProperty<Set<Identifier>> identifiers() {
return _identifiers;
}
/**
* The meta-property for the {@code currentDate} property.
* @return the meta-property, not null
*/
public final MetaProperty<LocalDate> currentDate() {
return _currentDate;
}
/**
* The meta-property for the {@code dataSource} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataSource() {
return _dataSource;
}
/**
* The meta-property for the {@code dataProvider} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataProvider() {
return _dataProvider;
}
/**
* The meta-property for the {@code dataField} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> dataField() {
return _dataField;
}
/**
* The meta-property for the {@code observationTime} property.
* @return the meta-property, not null
*/
public final MetaProperty<String> observationTime() {
return _observationTime;
}
/**
* The meta-property for the {@code start} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> start() {
return _start;
}
/**
* The meta-property for the {@code end} property.
* @return the meta-property, not null
*/
public final MetaProperty<T> end() {
return _end;
}
/**
* The meta-property for the {@code loadTimeSeries} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadTimeSeries() {
return _loadTimeSeries;
}
/**
* The meta-property for the {@code loadDates} property.
* @return the meta-property, not null
*/
public final MetaProperty<Boolean> loadDates() {
return _loadDates;
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
Diff Result
No diff
Case 13 - ogplatform.rev_412e2_f4b6f.InMemoryConfigMaster.java
public InMemoryConfigMaster(final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified body: type ObjectIdentifierSupplier → ObjectIdSupplier
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier)
Left modified body: type BasicMasterChangeManager → BasicChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict only on signature
Safe reported conflict of whole constructor
MergeMethods reported conflict of whole constructor
KeepBothMethods kept both versions
void remove
Left modified body
Right modified signature: type UniqueIdentifier → UniqueId
Unstructured merged changes: new signature and new body
Safe kept both versions
MergeMethods merged changes: new signature and new body
KeepBothMethods kept both versions
Base
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifiables;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
 * The default scheme used for each {@link ObjectIdentifier}.
 */
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
 * A cache of config documents keyed by object identifier.
 */
private final ConcurrentMap<ObjectIdentifier, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectIdentifier, ConfigDocument<?>>();
/**
 * The supplier of object identifiers for newly added documents.
 */
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
 * The change manager used to broadcast add/update/remove events.
 */
private final MasterChangeManager _changeManager;
/**
 * Creates an instance using the default scheme for object identifiers.
 */
public InMemoryConfigMaster() {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
 * Creates an instance specifying the change manager.
 *
 * @param changeManager the change manager, not null
 */
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
 * Creates an instance specifying the supplier of object identifiers.
 *
 * @param objectIdSupplier the supplier of object identifiers, not null
 */
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
 * Creates an instance specifying the supplier of object identifiers and change manager.
 *
 * @param objectIdSupplier the supplier of object identifiers, not null
 * @param changeManager the change manager, not null
 */
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
 * Gets a document by unique identifier, delegating to the latest version-correction.
 */
@Override
public ConfigDocument<?> get(UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier; this master keeps no history, so the
 * version-correction is validated for nullity but otherwise ignored.
 *
 * @throws DataNotFoundException if no document exists for the identifier
 */
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
 * Adds a new document: assigns a fresh object identifier from the supplier,
 * stores a copy of the document's fields, and fires an ADDED event.
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectIdentifier objectId = _objectIdSupplier.get();
final UniqueIdentifier uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
UniqueIdentifiables.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
/**
 * Updates an existing document in place (no versioning) and fires an UPDATED event.
 * Uses a compare-and-replace on the map to detect concurrent modification.
 */
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
 * Removes a document by unique identifier and fires a REMOVED event.
 *
 * @throws DataNotFoundException if no document exists for the identifier
 */
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
 * Corrects a document; without versioning this is identical to update.
 */
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
 * Returns the change manager that broadcasts events for this master.
 */
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
/**
 * Gets a document by unique identifier, checking the stored value is of the given type.
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueIdentifier uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
/**
 * Gathers meta-data; when requested, collects the distinct classes of all stored values.
 */
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
 * Searches by a linear scan over all stored documents, then applies paging.
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
/**
 * Queries history; since versions are not kept, at most the latest document is returned.
 */
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
// NOTE(review): get(...) throws DataNotFoundException rather than returning null,
// so this null check looks purely defensive — confirm intended behavior for missing ids.
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier, checking the stored value is of the given type.
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifiables;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectIdentifier, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectIdentifier objectId = _objectIdSupplier.get();
final UniqueIdentifier uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
UniqueIdentifiables.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueIdentifier uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
Left
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifiables;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectIdentifier, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectIdentifier objectId = _objectIdSupplier.get();
final UniqueIdentifier uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
UniqueIdentifiables.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueIdentifier uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifiables;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectIdentifier, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectIdentifier objectId = _objectIdSupplier.get();
final UniqueIdentifier uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
UniqueIdentifiables.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueIdentifier uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
Right
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of config documents keyed by object identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplier of object identifiers for newly added documents.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager used to broadcast add/update/remove events.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance using the default object-identifier scheme.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier, delegating to the latest version-correction.
*
* @param uniqueId the unique identifier, not null
* @return the matching document, not null
*/
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier.
* The version-correction is validated but otherwise unused, as this master is unversioned.
*
* @param objectId the object identifier, not null
* @param versionCorrection the version-correction, not null
* @return the matching document, not null
* @throws DataNotFoundException if no document is stored for the identifier
*/
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
* Adds a document, allocating a fresh object identifier and firing an ADDED event.
*
* @param <T> the configuration element type
* @param document the document to add, with name and value set, not null
* @return the stored document, with identifier and version instant set, not null
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
// Unversioned master: every document carries an empty version string.
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
/**
* Updates (replaces) the stored document with the same object identifier, firing an UPDATED event.
*
* @param <T> the configuration element type
* @param document the replacement document, with unique identifier and value set, not null
* @return the updated document, not null
* @throws DataNotFoundException if no document is stored for the identifier
* @throws IllegalArgumentException if another thread replaced the document concurrently
*/
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// Atomic compare-and-replace detects a concurrent update of the same document.
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
* Removes the document with the given identifier, firing a REMOVED event.
*
* @param uniqueId the unique identifier to remove, not null
* @throws DataNotFoundException if no document is stored for the identifier
*/
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
* Corrects a document; as this master is unversioned this is identical to {@link #update}.
*
* @param <T> the configuration element type
* @param document the document to correct, not null
* @return the corrected document, not null
*/
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
* Gets the change manager that broadcasts events from this master.
*
* @return the change manager, not null
*/
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier, checking the stored value is of the requested type.
*
* @param <T> the configuration element type
* @param uniqueId the unique identifier, not null
* @param clazz the expected configuration type, not null
* @return the matching document, not null
* @throws DataNotFoundException if the document is missing or of a different type
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
/**
* Describes the configuration types currently stored, if requested.
*
* @param request the meta-data request, not null
* @return the meta-data result, not null
*/
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
* Searches the store by linear scan, applying the request's matcher and then paging.
*
* @param <T> the configuration element type
* @param request the search request, not null
* @return the paged search result, not null
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
/**
* Gets the "history" of a document; as this master is unversioned, the result
* contains at most the latest document.
*
* @param <T> the configuration element type
* @param request the history request, with object identifier and type set, not null
* @return the history result, not null
*/
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier and version-correction, checking the stored
* value is of the requested type.
*
* @param <T> the configuration element type
* @param objectId the object identifier, not null
* @param versionCorrection the version-correction, not null
* @param clazz the expected configuration type, not null
* @return the matching document, not null
* @throws DataNotFoundException if the document is missing or of a different type
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of config documents keyed by object identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplier of object identifiers for newly added documents.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager used to broadcast add/update/remove events.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance using the default object-identifier scheme.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier, delegating to the latest version-correction.
*
* @param uniqueId the unique identifier, not null
* @return the matching document, not null
*/
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier.
* The version-correction is validated but otherwise unused, as this master is unversioned.
*
* @param objectId the object identifier, not null
* @param versionCorrection the version-correction, not null
* @return the matching document, not null
* @throws DataNotFoundException if no document is stored for the identifier
*/
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
* Adds a document, allocating a fresh object identifier and firing an ADDED event.
*
* @param <T> the configuration element type
* @param document the document to add, with name and value set, not null
* @return the stored document, with identifier and version instant set, not null
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
// Unversioned master: every document carries an empty version string.
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
/**
* Updates (replaces) the stored document with the same object identifier, firing an UPDATED event.
*
* @param <T> the configuration element type
* @param document the replacement document, with unique identifier and value set, not null
* @return the updated document, not null
* @throws DataNotFoundException if no document is stored for the identifier
* @throws IllegalArgumentException if another thread replaced the document concurrently
*/
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// Atomic compare-and-replace detects a concurrent update of the same document.
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
* Removes the document with the given identifier, firing a REMOVED event.
*
* @param uniqueId the unique identifier to remove, not null
* @throws DataNotFoundException if no document is stored for the identifier
*/
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
* Corrects a document; as this master is unversioned this is identical to {@link #update}.
*
* @param <T> the configuration element type
* @param document the document to correct, not null
* @return the corrected document, not null
*/
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
* Gets the change manager that broadcasts events from this master.
*
* @return the change manager, not null
*/
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier, checking the stored value is of the requested type.
*
* @param <T> the configuration element type
* @param uniqueId the unique identifier, not null
* @param clazz the expected configuration type, not null
* @return the matching document, not null
* @throws DataNotFoundException if the document is missing or of a different type
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
/**
* Describes the configuration types currently stored, if requested.
*
* @param request the meta-data request, not null
* @return the meta-data result, not null
*/
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
* Searches the store by linear scan, applying the request's matcher and then paging.
*
* @param <T> the configuration element type
* @param request the search request, not null
* @return the paged search result, not null
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
/**
* Gets the "history" of a document; as this master is unversioned, the result
* contains at most the latest document.
*
* @param <T> the configuration element type
* @param request the history request, with object identifier and type set, not null
* @return the history result, not null
*/
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier and version-correction, checking the stored
* value is of the requested type.
*
* @param <T> the configuration element type
* @param objectId the object identifier, not null
* @param versionCorrection the version-correction, not null
* @param clazz the expected configuration type, not null
* @return the matching document, not null
* @throws DataNotFoundException if the document is missing or of a different type
*/
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
MergeMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of config documents keyed by object identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplier of object identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
// NOTE(review): unresolved merge conflict below — this file will not compile as-is.
// The fields above are typed Supplier<ObjectId> and ChangeManager, so the correct
// resolution combines MINE's ChangeManager parameter with YOURS's Supplier<ObjectId>
// parameter:
//   public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final ChangeManager changeManager)
// (MINE's Supplier<ObjectIdentifier> and YOURS's MasterChangeManager reference types
// that are not imported in this version — TODO confirm against project history.)
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
KeepBothMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
Safe
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
=======
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
=======
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier.
 * The version-correction is validated for null but otherwise unused by this
 * unversioned master.
 *
 * @param objectId  the object identifier, not null
 * @param versionCorrection  the version-correction, not null
 * @return the stored document, not null
 * @throws DataNotFoundException if no document exists for the identifier
 */
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
  ArgumentChecker.notNull(objectId, "objectId");
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  final ConfigDocument<?> found = _store.get(objectId.getObjectId());
  if (found != null) {
    return found;
  }
  throw new DataNotFoundException("Config not found: " + objectId);
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
// Adds a new document: draws a fresh ObjectId from the supplier, stamps the
// value and a new wrapper document with the derived UniqueId, stores it, and
// fires an ADDED change event. The input document itself is not stored.
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
// empty version string: this master does not version documents
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
// Replaces the stored document for the given unique identifier, resetting its
// version/correction instants to now, and fires an UPDATED change event.
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// atomic compare-and-replace: fails if another thread swapped the entry
// between the get above and this call
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
 * Removes the document for the given unique identifier and fires a REMOVED
 * change event.
 * <p>
 * Merge artifact fixed: the file previously contained two remove methods —
 * one taking the pre-rename {@code UniqueIdentifier} type (not imported) and
 * one calling {@code masterChanged(MasterChangedType...)} (neither name is
 * imported). The resolved method uses the renamed {@code UniqueId} and the
 * {@code entityChanged(ChangeType...)} API used by every other method here.
 *
 * @param uniqueId  the unique identifier to remove, not null
 * @throws DataNotFoundException if no document exists for the identifier
 */
@Override
public void remove(UniqueId uniqueId) {
  ArgumentChecker.notNull(uniqueId, "uniqueId");
  if (_store.remove(uniqueId.getObjectId()) == null) {
    throw new DataNotFoundException("Config not found: " + uniqueId);
  }
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
// This unversioned master treats a correction identically to an update.
return update(document);
}
//-------------------------------------------------------------------------
// Returns the change manager that receives the entity change events fired by
// add/update/remove.
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
/**
 * Gets the latest document for the identifier, checking that its value is an
 * instance of the requested type.
 *
 * @param uniqueId  the unique identifier, not null
 * @param clazz  the expected value type, not null
 * @return the typed document, not null
 * @throws DataNotFoundException if absent or the value is not of the type
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
  ArgumentChecker.notNull(clazz, "clazz");
  final ConfigDocument<?> found = get(uniqueId);
  if (clazz.isInstance(found.getValue())) {
    return (ConfigDocument<T>) found;
  }
  throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
//-------------------------------------------------------------------------
/**
 * Returns meta-data about the stored documents.
 * When config types are requested, the distinct runtime classes of all stored
 * values are collected.
 *
 * @param request  the meta-data request, not null
 * @return the meta-data result, not null
 */
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
  ArgumentChecker.notNull(request, "request");
  final ConfigMetaDataResult result = new ConfigMetaDataResult();
  if (request.isConfigTypes()) {
    final Set<Class<?>> distinctTypes = Sets.newHashSet();
    for (final ConfigDocument<?> stored : _store.values()) {
      distinctTypes.add(stored.getValue().getClass());
    }
    result.getConfigTypes().addAll(distinctTypes);
  }
  return result;
}
//-------------------------------------------------------------------------
/**
 * Searches for documents matching the request, applying the request's paging.
 *
 * @param <T> the configuration element type
 * @param request  the search request, not null
 * @return the paged search result, not null
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
  ArgumentChecker.notNull(request, "request");
  final List<ConfigDocument<T>> matched = new ArrayList<ConfigDocument<T>>();
  for (final ConfigDocument<?> candidate : _store.values()) {
    if (request.matches(candidate)) {
      matched.add((ConfigDocument<T>) candidate);
    }
  }
  final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
  result.setPaging(Paging.of(request.getPagingRequest(), matched));
  result.getDocuments().addAll(request.getPagingRequest().select(matched));
  return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
// This master is unversioned, so the "history" is at most the latest document.
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
// NOTE(review): the typed get above throws DataNotFoundException rather than
// returning null, so this null check looks purely defensive — confirm intent.
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier and version-correction, checking that
 * its value is an instance of the requested type.
 *
 * @param objectId  the object identifier, not null
 * @param versionCorrection  the version-correction, not null
 * @param clazz  the expected value type, not null
 * @return the typed document, not null
 * @throws DataNotFoundException if absent or the value is not of the type
 */
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
  ArgumentChecker.notNull(objectId, "objectId");
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  ArgumentChecker.notNull(clazz, "clazz");
  final ConfigDocument<?> found = get(objectId, versionCorrection);
  if (clazz.isInstance(found.getValue())) {
    return (ConfigDocument<T>) found;
  }
  throw new DataNotFoundException("Config not found: " + objectId);
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
=======
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
=======
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}
Unstructured
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
=======
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ConfigDocument<?> document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getName(), "document.name");
ArgumentChecker.notNull(document.getValue(), "document.value");
final Object value = document.getValue();
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final Instant now = Instant.now();
IdUtils.setInto(value, uniqueId);
final ConfigDocument<Object> doc = new ConfigDocument<Object>(document.getType());
doc.setName(document.getName());
doc.setValue(value);
doc.setUniqueId(uniqueId);
doc.setVersionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return (ConfigDocument<T>) doc;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getValue(), "document.value");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final ConfigDocument<?> storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Config not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(uniqueId);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
return (ConfigDocument<T>) document;
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
ConfigMetaDataResult result = new ConfigMetaDataResult();
if (request.isConfigTypes()) {
Set<Class<?>> types = Sets.newHashSet();
for (ConfigDocument<?> doc : _store.values()) {
types.add(doc.getValue().getClass());
}
result.getConfigTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
ArgumentChecker.notNull(request, "request");
final List<ConfigDocument<T>> list = new ArrayList<ConfigDocument<T>>();
for (ConfigDocument<?> doc : _store.values()) {
if (request.matches(doc)) {
list.add((ConfigDocument<T>) doc);
}
}
final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
result.setPaging(Paging.of(request.getPagingRequest(), list));
List<ConfigDocument<T>> select = request.getPagingRequest().select(list);
result.getDocuments().addAll(select);
return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
ArgumentChecker.notNull(request.getType(), "request.configClazz");
final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
final ConfigDocument<T> doc = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
ArgumentChecker.notNull(clazz, "clazz");
ConfigDocument<?> document = get(objectId, versionCorrection);
if (!clazz.isInstance(document.getValue())) {
throw new DataNotFoundException("Config not found: " + objectId);
}
return (ConfigDocument<T>) document;
}
}/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.config.impl;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.google.common.collect.Sets;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.IdUtils;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.config.ConfigDocument;
import com.opengamma.master.config.ConfigHistoryRequest;
import com.opengamma.master.config.ConfigHistoryResult;
import com.opengamma.master.config.ConfigMaster;
import com.opengamma.master.config.ConfigMetaDataRequest;
import com.opengamma.master.config.ConfigMetaDataResult;
import com.opengamma.master.config.ConfigSearchRequest;
import com.opengamma.master.config.ConfigSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code ConfigMaster}.
* <p>
* This master does not support versioning of configuration documents.
* <p>
* This implementation does not copy stored elements, making it thread-hostile.
* As such, this implementation is currently most useful for testing scenarios.
*/
public class InMemoryConfigMaster implements ConfigMaster {
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemCfg";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, ConfigDocument<?>> _store = new ConcurrentHashMap<ObjectId, ConfigDocument<?>>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryConfigMaster() {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
=======
public InMemoryConfigMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(InMemoryConfigMaster.DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryConfigMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemoryConfigMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public ConfigDocument<?> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
  ArgumentChecker.notNull(objectId, "objectId");
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  // Only the latest document is held in memory; the version-correction is
  // validated but otherwise not used for lookup.
  final ConfigDocument<?> doc = _store.get(objectId.getObjectId());
  if (doc != null) {
    return doc;
  }
  throw new DataNotFoundException("Config not found: " + objectId);
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> add(ConfigDocument<T> document) {
  // Stores a fresh copy of the document under a newly allocated object id
  // and fires an ADDED change event.
  ArgumentChecker.notNull(document, "document");
  ArgumentChecker.notNull(document.getName(), "document.name");
  ArgumentChecker.notNull(document.getValue(), "document.value");
  final Object configValue = document.getValue();
  final ObjectId oid = _objectIdSupplier.get();
  final UniqueId uid = oid.atVersion("");
  final Instant now = Instant.now();
  IdUtils.setInto(configValue, uid);
  final ConfigDocument<Object> stored = new ConfigDocument<Object>(document.getType());
  stored.setName(document.getName());
  stored.setValue(configValue);
  stored.setUniqueId(uid);
  stored.setVersionFromInstant(now);
  _store.put(oid, stored);
  _changeManager.entityChanged(ChangeType.ADDED, null, uid, now);
  return (ConfigDocument<T>) stored;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> update(ConfigDocument<T> document) {
  // Replaces the stored document atomically, stamping fresh version and
  // correction instants, then fires an UPDATED change event.
  ArgumentChecker.notNull(document, "document");
  ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
  ArgumentChecker.notNull(document.getValue(), "document.value");
  final UniqueId uniqueId = document.getUniqueId();
  final Instant now = Instant.now();
  final ConfigDocument<?> existing = _store.get(uniqueId.getObjectId());
  if (existing == null) {
    throw new DataNotFoundException("Config not found: " + uniqueId);
  }
  document.setVersionFromInstant(now);
  document.setVersionToInstant(null);
  document.setCorrectionFromInstant(now);
  document.setCorrectionToInstant(null);
  // Compare-and-set replace guards against a concurrent writer.
  if (!_store.replace(uniqueId.getObjectId(), existing, document)) {
    throw new IllegalArgumentException("Concurrent modification");
  }
  _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
  return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
  // Removes the document and fires a REMOVED change event; fails if absent.
  ArgumentChecker.notNull(uniqueId, "uniqueId");
  final ConfigDocument<?> removed = _store.remove(uniqueId.getObjectId());
  if (removed == null) {
    throw new DataNotFoundException("Config not found: " + uniqueId);
  }
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigDocument<T> correct(ConfigDocument<T> document) {
// In this in-memory master a correction is indistinguishable from an update.
return update(document);
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
// Exposes the change manager supplied at construction.
return _changeManager;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(UniqueId uniqueId, Class<T> clazz) {
  // Typed lookup: reports "not found" rather than a cast failure when the
  // stored value is of a different type.
  ArgumentChecker.notNull(clazz, "clazz");
  final ConfigDocument<?> doc = get(uniqueId);
  if (clazz.isInstance(doc.getValue())) {
    return (ConfigDocument<T>) doc;
  }
  throw new DataNotFoundException("Config not found: " + uniqueId.getObjectId());
}
//-------------------------------------------------------------------------
@Override
public ConfigMetaDataResult metaData(ConfigMetaDataRequest request) {
  // Collects the distinct value classes currently stored, when requested.
  ArgumentChecker.notNull(request, "request");
  final ConfigMetaDataResult result = new ConfigMetaDataResult();
  if (request.isConfigTypes()) {
    final Set<Class<?>> seenTypes = Sets.newHashSet();
    for (final ConfigDocument<?> stored : _store.values()) {
      seenTypes.add(stored.getValue().getClass());
    }
    result.getConfigTypes().addAll(seenTypes);
  }
  return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigSearchResult<T> search(ConfigSearchRequest<T> request) {
  // Linear scan of the store, then paging applied to the matching documents.
  ArgumentChecker.notNull(request, "request");
  final List<ConfigDocument<T>> matches = new ArrayList<ConfigDocument<T>>();
  for (final ConfigDocument<?> stored : _store.values()) {
    if (request.matches(stored)) {
      matches.add((ConfigDocument<T>) stored);
    }
  }
  final ConfigSearchResult<T> result = new ConfigSearchResult<T>();
  result.setPaging(Paging.of(request.getPagingRequest(), matches));
  result.getDocuments().addAll(request.getPagingRequest().select(matches));
  return result;
}
//-------------------------------------------------------------------------
@Override
public <T> ConfigHistoryResult<T> history(ConfigHistoryRequest<T> request) {
  // Only the latest version exists in memory, so the "history" is at most
  // one document.
  ArgumentChecker.notNull(request, "request");
  ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
  ArgumentChecker.notNull(request.getType(), "request.configClazz");
  final ConfigHistoryResult<T> result = new ConfigHistoryResult<T>();
  final ConfigDocument<T> latest = get(request.getObjectId(), VersionCorrection.LATEST, request.getType());
  if (latest != null) {
    result.getDocuments().add(latest);
  }
  result.setPaging(Paging.of(result.getDocuments()));
  return result;
}
//-------------------------------------------------------------------------
@SuppressWarnings("unchecked")
@Override
public <T> ConfigDocument<T> get(ObjectIdentifiable objectId, VersionCorrection versionCorrection, Class<T> clazz) {
  // Typed variant of the versioned lookup; a type mismatch is reported as
  // "not found" rather than a ClassCastException.
  ArgumentChecker.notNull(objectId, "objectId");
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  ArgumentChecker.notNull(clazz, "clazz");
  final ConfigDocument<?> doc = get(objectId, versionCorrection);
  if (clazz.isInstance(doc.getValue())) {
    return (ConfigDocument<T>) doc;
  }
  throw new DataNotFoundException("Config not found: " + objectId);
}
}
Diff Result
No diff
Case 14 - ogplatform.rev_412e2_f4b6f.InMemoryHistoricalTimeSeriesMaster.java
public InMemoryConfigMaster(final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified body: type ObjectIdentifierSupplier → ObjectIdSupplier
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemoryConfigMaster(final Supplier&lt;ObjectIdentifier&gt; objectIdSupplier)
Left modified body: type BasicMasterChangeManager → BasicChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemoryConfigMaster(final Supplier&lt;ObjectIdentifier&gt; objectIdSupplier, final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict only on signature
Safe reported conflict of whole constructor
MergeMethods reported conflict of whole constructor
KeepBothMethods kept both versions
void remove
Left modified body
Right modified signature: type UniqueIdentifier → UniqueId
Unstructured merged changes: new signature and new body
Safe kept both versions
MergeMethods merged changes: new signature and new body
KeepBothMethods kept both versions
Base
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectIdentifier, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectIdentifier objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectIdentifier objectId = _objectIdSupplier.get();
final UniqueIdentifier uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueIdentifier uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueIdentifier uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectIdentifier objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueIdentifier updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectIdentifier objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueIdentifier correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectIdentifier objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueIdentifier removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectIdentifier objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getIdentifiers(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getIdentifiers().asIdentifierBundle().getIdentifiers().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectIdentifier, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectIdentifier objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
  validateDocument(document);
  // Allocate a fresh object identifier; the version string is blank as only the latest version is kept.
  final ObjectIdentifier objectId = _objectIdSupplier.get();
  final UniqueIdentifier uniqueId = objectId.atVersion("");
  // Store a clone so the stored document is isolated from later mutation by the caller.
  final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
  final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
  info.setUniqueId(uniqueId);
  final Instant now = Instant.now();
  cloned.setVersionFromInstant(now);
  cloned.setCorrectionFromInstant(now);
  cloned.getInfo().setTimeSeriesObjectId(objectId);
  _storeInfo.put(objectId, cloned);
  // Notify listeners after the store has been updated.
  _changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
  return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
  validateDocument(document);
  ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
  final UniqueIdentifier uniqueId = document.getUniqueId();
  final Instant now = Instant.now();
  // Read the current value first so the replace below can act as a compare-and-swap.
  final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
  if (storedDocument == null) {
    throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
  }
  // Reset the version/correction window: active from now, open-ended.
  document.setVersionFromInstant(now);
  document.setVersionToInstant(null);
  document.setCorrectionFromInstant(now);
  document.setCorrectionToInstant(null);
  // Replace only if nothing changed since the read above; otherwise a concurrent writer won.
  if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
    throw new IllegalArgumentException("Concurrent modification");
  }
  _changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
  return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueIdentifier uniqueId) {
  validateId(uniqueId);
  // Map.remove returns the previous value; null means there was nothing to remove.
  final HistoricalTimeSeriesInfoDocument removed = _storeInfo.remove(uniqueId.getObjectId());
  if (removed == null) {
    throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
  }
  _changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
  // In-memory master does not distinguish corrections from updates.
  return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
  ArgumentChecker.notNull(request, "request");
  ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
  // The in-memory store keeps a single (latest) version, so history contains at most one document.
  final HistoricalTimeSeriesInfoHistoryResult historyResult = new HistoricalTimeSeriesInfoHistoryResult();
  final HistoricalTimeSeriesInfoDocument latest = get(request.getObjectId(), VersionCorrection.LATEST);
  if (latest != null) {
    historyResult.getDocuments().add(latest);
  }
  historyResult.setPaging(Paging.of(historyResult.getDocuments()));
  return historyResult;
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueIdentifier uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
  // Delegates to the object-id variant using the latest version-correction.
  return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
  validateId(objectKey);
  // Null date bounds are treated as open-ended using sentinel dates.
  fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
  toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
  ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
  final ObjectIdentifier objectId = objectKey.getObjectId();
  final Instant now = Instant.now();
  LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
  if (existingSeries == null) {
    // No data points stored yet: still require the info document to exist, then return an empty series.
    if (_storeInfo.get(objectId) == null) {
      throw new DataNotFoundException("Historical time-series not found: " + objectId);
    }
    existingSeries = new ArrayLocalDateDoubleTimeSeries();
  }
  // Restrict to the requested window; earliest/latest reflect the full stored series.
  final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
  final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
  result.setUniqueId(objectId.atLatestVersion());
  result.setTimeSeries(subSeries);
  result.setEarliest(existingSeries.getEarliestTime());
  result.setLatest(existingSeries.getLatestTime());
  result.setVersionInstant(now);
  result.setCorrectionInstant(now);
  return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueIdentifier updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
  ArgumentChecker.notNull(objectKey, "objectKey");
  ArgumentChecker.notNull(series, "series");
  final ObjectIdentifier objectId = objectKey.getObjectId();
  final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
  if (existingSeries != null) {
    // New points may only be appended after the existing series; overlapping dates are rejected.
    if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
      throw new IllegalArgumentException("Unable to add time-series as dates overlap");
    }
    LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
    // Compare-and-swap: fails if another writer changed the series since the read above.
    if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
  } else {
    // First write for this series; putIfAbsent detects a racing first writer.
    if (_storePoints.putIfAbsent(objectId, series) != null) {
      throw new IllegalArgumentException("Concurrent modification");
    }
  }
  final Instant now = Instant.now();
  final UniqueIdentifier uniqueId = objectId.atLatestVersion();
  changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
  return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueIdentifier correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
  ArgumentChecker.notNull(objectKey, "objectKey");
  ArgumentChecker.notNull(series, "series");
  final ObjectIdentifier objectId = objectKey.getObjectId();
  LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
  if (existingSeries != null) {
    // Union with SECOND_OPERATOR: on overlapping dates the correcting series' values win.
    LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
    // Compare-and-swap: fails if another writer changed the series since the read above.
    if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
  } else {
    // First write for this series; putIfAbsent detects a racing first writer.
    if (_storePoints.putIfAbsent(objectId, series) != null) {
      throw new IllegalArgumentException("Concurrent modification");
    }
  }
  final Instant now = Instant.now();
  final UniqueIdentifier uniqueId = objectId.atLatestVersion();
  changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
  return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueIdentifier removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
  ArgumentChecker.notNull(objectKey, "objectKey");
  // Null bounds mean open-ended; substitute sentinel dates.  // TODO: JSR-310 min/max date
  fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1));
  toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
  ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
  final ObjectIdentifier oid = objectKey.getObjectId();
  final LocalDateDoubleTimeSeries current = _storePoints.get(oid);
  if (current == null) {
    // Nothing stored, nothing to remove.
    return oid.atLatestVersion();
  }
  // Copy to a mutable series and drop every date inside [fromDateInclusive, toDateInclusive].
  final MutableLocalDateDoubleTimeSeries working = current.toMutableLocalDateDoubleTimeSeries();
  final Iterator<LocalDate> dates = working.timeIterator();
  while (dates.hasNext()) {
    final LocalDate date = dates.next();
    final boolean inRange = !date.isBefore(fromDateInclusive) && !date.isAfter(toDateInclusive);
    if (inRange) {
      dates.remove();
    }
  }
  // Compare-and-swap against the series read above to detect concurrent writers.
  if (!_storePoints.replace(oid, current, working.toLocalDateDoubleTimeSeries())) {
    throw new IllegalArgumentException("Concurrent modification");
  }
  return oid.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public MasterChangeManager changeManager() {
  // Simple accessor for the change manager supplied at construction.
  return _changeManager;
}
//-------------------------------------------------------------------------
/**
 * Validates that the object identifier is non-null and has a numeric value.
 *
 * @param objectId  the identifier to validate, not null
 * @return the numeric value of the identifier
 * @throws IllegalArgumentException if the identifier value is not a valid long
 */
private long validateId(ObjectIdentifiable objectId) {
  ArgumentChecker.notNull(objectId, "objectId");
  try {
    return Long.parseLong(objectId.getObjectId().getValue());
  } catch (NumberFormatException ex) {
    // Chain the cause so the original parse failure is not lost from the stack trace.
    throw new IllegalArgumentException("Invalid objectId " + objectId, ex);
  }
}
/**
 * Validates that the document and its mandatory info fields are populated.
 * The unique identifier, if present, must have a numeric value; the info must carry at least
 * one identifier and non-blank data source, provider, field and observation time.
 */
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
  ArgumentChecker.notNull(document, "document");
  // A unique identifier is optional (absent on add), but if present it must be numeric.
  if (document.getUniqueId() != null) {
    validateId(document.getUniqueId());
  }
  ArgumentChecker.notNull(document.getInfo(), "document.series");
  ArgumentChecker.notNull(document.getInfo().getIdentifiers(), "document.series.identifiers");
  ArgumentChecker.isTrue(document.getInfo().getIdentifiers().asIdentifierBundle().getIdentifiers().size() > 0, "document.series.identifiers must not be empty");
  ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
  ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
  ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
  ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
Left
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
 * An in-memory implementation of a historical time-series master.
 * <p>
 * Only the latest version of each time-series is held; version/correction
 * parameters are accepted but do not select historic versions.
 */
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {

  /**
   * The default scheme used for each {@link UniqueIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemHts";

  /**
   * A cache of time-series info by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument>();
  /**
   * A cache of time-series points by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectIdentifier, LocalDateDoubleTimeSeries>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance.
   */
  public InMemoryHistoricalTimeSeriesMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  /**
   * Collects the distinct data fields, sources, providers and observation times
   * across all stored documents, for each category the request asks for.
   */
  @Override
  public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
    if (request.isDataFields()) {
      Set<String> types = new HashSet<String>();
      for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
        types.add(doc.getInfo().getDataField());
      }
      result.getDataFields().addAll(types);
    }
    if (request.isDataSources()) {
      Set<String> types = new HashSet<String>();
      for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
        types.add(doc.getInfo().getDataSource());
      }
      result.getDataSources().addAll(types);
    }
    if (request.isDataProviders()) {
      Set<String> types = new HashSet<String>();
      for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
        types.add(doc.getInfo().getDataProvider());
      }
      result.getDataProviders().addAll(types);
    }
    if (request.isObservationTimes()) {
      Set<String> types = new HashSet<String>();
      for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
        types.add(doc.getInfo().getObservationTime());
      }
      result.getObservationTimes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Searches by linear scan of the store, applying the request's paging.
   */
  @Override
  public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
    for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public HistoricalTimeSeriesInfoDocument get(final UniqueIdentifier uniqueId) {
    // Delegates to the object-id variant; only the latest version is stored.
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  @Override
  public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
    validateId(objectKey);
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // versionCorrection is validated but not otherwise used by this in-memory store.
    final ObjectIdentifier objectId = objectKey.getObjectId();
    final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
    if (document == null) {
      throw new DataNotFoundException("Historical time-series not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
    validateDocument(document);
    // Allocate a fresh object identifier; the version string is blank.
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    // Store a clone so the stored document is isolated from later caller mutation.
    final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
    final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
    info.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    cloned.setVersionFromInstant(now);
    cloned.setCorrectionFromInstant(now);
    cloned.getInfo().setTimeSeriesObjectId(objectId);
    _storeInfo.put(objectId, cloned);
    // Notify listeners after the store has been updated.
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return cloned;
  }

  //-------------------------------------------------------------------------
  @Override
  public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
    validateDocument(document);
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    // Read the current value first so the replace below acts as a compare-and-swap.
    final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
    }
    // Reset the version/correction window: active from now, open-ended.
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(final UniqueIdentifier uniqueId) {
    validateId(uniqueId);
    // Map.remove returns null when there was nothing stored under the identifier.
    if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
    // In-memory master does not distinguish corrections from updates.
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    // Only the latest version is held, so history contains at most one document.
    final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
    final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public ManageableHistoricalTimeSeries getTimeSeries(UniqueIdentifier uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
    return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
  }

  //-------------------------------------------------------------------------
  @Override
  public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
    validateId(objectKey);
    // Null date bounds are treated as open-ended using sentinel dates.
    fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
    toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
    ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
    final ObjectIdentifier objectId = objectKey.getObjectId();
    final Instant now = Instant.now();
    LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
    if (existingSeries == null) {
      // No data points yet: require the info document to exist, then serve an empty series.
      if (_storeInfo.get(objectId) == null) {
        throw new DataNotFoundException("Historical time-series not found: " + objectId);
      }
      existingSeries = new ArrayLocalDateDoubleTimeSeries();
    }
    // Restrict to the requested window; earliest/latest reflect the full stored series.
    final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
    final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
    result.setUniqueId(objectId.atLatestVersion());
    result.setTimeSeries(subSeries);
    result.setEarliest(existingSeries.getEarliestTime());
    result.setLatest(existingSeries.getLatestTime());
    result.setVersionInstant(now);
    result.setCorrectionInstant(now);
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public UniqueIdentifier updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
    ArgumentChecker.notNull(objectKey, "objectKey");
    ArgumentChecker.notNull(series, "series");
    final ObjectIdentifier objectId = objectKey.getObjectId();
    final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
    if (existingSeries != null) {
      // New points may only be appended after the existing series; overlaps are rejected.
      if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
        throw new IllegalArgumentException("Unable to add time-series as dates overlap");
      }
      LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
      // Compare-and-swap against the series read above to detect concurrent writers.
      if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
        throw new IllegalArgumentException("Concurrent modification");
      }
    } else {
      if (_storePoints.putIfAbsent(objectId, series) != null) {
        throw new IllegalArgumentException("Concurrent modification");
      }
    }
    final Instant now = Instant.now();
    final UniqueIdentifier uniqueId = objectId.atLatestVersion();
    changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
    return uniqueId;
  }

  //-------------------------------------------------------------------------
  @Override
  public UniqueIdentifier correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
    ArgumentChecker.notNull(objectKey, "objectKey");
    ArgumentChecker.notNull(series, "series");
    final ObjectIdentifier objectId = objectKey.getObjectId();
    LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
    if (existingSeries != null) {
      // Union with SECOND_OPERATOR: on overlapping dates the correcting series' values win.
      LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
      if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
        throw new IllegalArgumentException("Concurrent modification");
      }
    } else {
      if (_storePoints.putIfAbsent(objectId, series) != null) {
        throw new IllegalArgumentException("Concurrent modification");
      }
    }
    final Instant now = Instant.now();
    final UniqueIdentifier uniqueId = objectId.atLatestVersion();
    changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
    return uniqueId;
  }

  //-------------------------------------------------------------------------
  @Override
  public UniqueIdentifier removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
    ArgumentChecker.notNull(objectKey, "objectKey");
    fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
    toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
    ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
    final ObjectIdentifier objectId = objectKey.getObjectId();
    LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
    if (existingSeries == null) {
      // Nothing stored, nothing to remove.
      return objectId.atLatestVersion();
    }
    // Drop every date inside [fromDateInclusive, toDateInclusive] via a mutable copy.
    MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
    for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
      LocalDate date = it.next();
      if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
        it.remove();
      }
    }
    // Compare-and-swap against the series read above to detect concurrent writers.
    if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    return objectId.atLatestVersion();
  }

  //-------------------------------------------------------------------------
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }

  //-------------------------------------------------------------------------
  /**
   * Validates that the object identifier is non-null and has a numeric value.
   *
   * @param objectId  the identifier to validate, not null
   * @return the numeric value of the identifier
   * @throws IllegalArgumentException if the identifier value is not a valid long
   */
  private long validateId(ObjectIdentifiable objectId) {
    ArgumentChecker.notNull(objectId, "objectId");
    try {
      return Long.parseLong(objectId.getObjectId().getValue());
    } catch (NumberFormatException ex) {
      // Chain the cause so the original parse failure is not lost from the stack trace.
      throw new IllegalArgumentException("Invalid objectId " + objectId, ex);
    }
  }

  /**
   * Validates that the document and its mandatory info fields are populated.
   */
  private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
    ArgumentChecker.notNull(document, "document");
    // A unique identifier is optional (absent on add), but if present it must be numeric.
    if (document.getUniqueId() != null) {
      validateId(document.getUniqueId());
    }
    ArgumentChecker.notNull(document.getInfo(), "document.series");
    ArgumentChecker.notNull(document.getInfo().getIdentifiers(), "document.series.identifiers");
    ArgumentChecker.isTrue(document.getInfo().getIdentifiers().asIdentifierBundle().getIdentifiers().size() > 0, "document.series.identifiers must not be empty");
    ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
    ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
    ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
    ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
  }

}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectIdentifier, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectIdentifier, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectIdentifier, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
 * Collects the distinct data fields, data sources, data providers and
 * observation times currently stored, as selected by the request flags.
 *
 * @param request the metadata request, not null
 * @return the populated metadata result, not null
 */
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
  ArgumentChecker.notNull(request, "request");
  final HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
  if (request.isDataFields()) {
    final Set<String> dataFields = new HashSet<String>();
    for (final HistoricalTimeSeriesInfoDocument stored : _storeInfo.values()) {
      dataFields.add(stored.getInfo().getDataField());
    }
    result.getDataFields().addAll(dataFields);
  }
  if (request.isDataSources()) {
    final Set<String> dataSources = new HashSet<String>();
    for (final HistoricalTimeSeriesInfoDocument stored : _storeInfo.values()) {
      dataSources.add(stored.getInfo().getDataSource());
    }
    result.getDataSources().addAll(dataSources);
  }
  if (request.isDataProviders()) {
    final Set<String> dataProviders = new HashSet<String>();
    for (final HistoricalTimeSeriesInfoDocument stored : _storeInfo.values()) {
      dataProviders.add(stored.getInfo().getDataProvider());
    }
    result.getDataProviders().addAll(dataProviders);
  }
  if (request.isObservationTimes()) {
    final Set<String> observationTimes = new HashSet<String>();
    for (final HistoricalTimeSeriesInfoDocument stored : _storeInfo.values()) {
      observationTimes.add(stored.getInfo().getObservationTime());
    }
    result.getObservationTimes().addAll(observationTimes);
  }
  return result;
}
//-------------------------------------------------------------------------
/**
 * Searches the in-memory store, returning all documents matched by the
 * request and paged according to the request's paging settings.
 *
 * @param request the search request, not null
 * @return the paged search result, not null
 */
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
  ArgumentChecker.notNull(request, "request");
  final List<HistoricalTimeSeriesInfoDocument> matched = new ArrayList<HistoricalTimeSeriesInfoDocument>();
  for (final HistoricalTimeSeriesInfoDocument candidate : _storeInfo.values()) {
    if (request.matches(candidate)) {
      matched.add(candidate);
    }
  }
  final HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
  result.setPaging(Paging.of(request.getPagingRequest(), matched));
  result.getDocuments().addAll(request.getPagingRequest().select(matched));
  return result;
}
//-------------------------------------------------------------------------
/**
 * Gets a document by unique identifier, delegating to the latest version-correction.
 *
 * @param uniqueId the unique identifier, not null
 * @return the matching document, not null
 */
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueIdentifier uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier. This in-memory master stores only the
 * latest version, so the version-correction is validated but otherwise unused.
 *
 * @param objectKey the object identifier, not null
 * @param versionCorrection the version-correction, not null
 * @return the matching document, not null
 * @throws DataNotFoundException if no document exists for the identifier
 */
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
  validateId(objectKey);
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  final ObjectIdentifier objectId = objectKey.getObjectId();
  final HistoricalTimeSeriesInfoDocument stored = _storeInfo.get(objectId);
  if (stored == null) {
    throw new DataNotFoundException("Historical time-series not found: " + objectId);
  }
  return stored;
}
//-------------------------------------------------------------------------
/**
 * Adds a document, cloning the input so that later caller mutations do not
 * affect the stored copy, assigning a fresh identifier and version instants,
 * and notifying the change manager.
 *
 * @param document the document to add, not null
 * @return the stored clone with identifier and instants set, not null
 */
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
  validateDocument(document);
  final ObjectIdentifier objectId = _objectIdSupplier.get();
  final UniqueIdentifier uniqueId = objectId.atVersion("");
  final HistoricalTimeSeriesInfoDocument copy = JodaBeanUtils.clone(document);
  final ManageableHistoricalTimeSeriesInfo info = copy.getInfo();
  info.setUniqueId(uniqueId);
  final Instant now = Instant.now();
  copy.setVersionFromInstant(now);
  copy.setCorrectionFromInstant(now);
  info.setTimeSeriesObjectId(objectId);
  _storeInfo.put(objectId, copy);
  _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
  return copy;
}
//-------------------------------------------------------------------------
/**
 * Updates an existing document. The caller's document is stamped with new
 * version/correction instants and swapped in atomically; it is mutated even
 * if the compare-and-swap subsequently fails.
 *
 * @param document the document to update, with unique identifier set, not null
 * @return the updated document (same instance as the argument), not null
 * @throws DataNotFoundException if no document exists for the identifier
 * @throws IllegalArgumentException on concurrent modification
 */
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// Atomic swap: fails if another thread replaced the document since the get above.
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
 * Removes a document by unique identifier and notifies the change manager.
 *
 * @param uniqueId the unique identifier to remove, not null
 * @throws DataNotFoundException if no document exists for the identifier
 */
@Override
public void remove(final UniqueIdentifier uniqueId) {
  validateId(uniqueId);
  final HistoricalTimeSeriesInfoDocument removed = _storeInfo.remove(uniqueId.getObjectId());
  if (removed == null) {
    throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
  }
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
 * Corrects a document. This in-memory master keeps no history, so a
 * correction is identical to an update.
 *
 * @param document the document to correct, not null
 * @return the corrected document, not null
 */
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
 * Gets the history of a document. This master stores only the latest
 * version, so the result contains at most that single document.
 *
 * @param request the history request, not null
 * @return the history result, not null
 */
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
  ArgumentChecker.notNull(request, "request");
  ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
  final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
  final HistoricalTimeSeriesInfoDocument latest = get(request.getObjectId(), VersionCorrection.LATEST);
  if (latest != null) {
    result.getDocuments().add(latest);
  }
  result.setPaging(Paging.of(result.getDocuments()));
  return result;
}
//-------------------------------------------------------------------------
/**
 * Gets the data points for a series by unique identifier, delegating to the
 * latest version-correction.
 *
 * @param uniqueId the unique identifier, not null
 * @param fromDateInclusive the inclusive start date, null for unbounded
 * @param toDateInclusive the inclusive end date, null for unbounded
 * @return the matching time-series, not null
 */
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueIdentifier uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
/**
 * Gets the stored data points restricted to the given inclusive date range.
 * Null bounds default to a very wide range (see TODO below).
 *
 * @param objectKey the object identifier, not null
 * @param versionCorrection the version-correction (unused beyond validation)
 * @param fromDateInclusive the inclusive start date, null for unbounded
 * @param toDateInclusive the inclusive end date, null for unbounded
 * @return the matching time-series, not null
 * @throws DataNotFoundException if no info document exists for the identifier
 */
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectIdentifier objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
// No points stored yet: the info document must still exist for the id to be valid.
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
// NOTE(review): behaviour of getEarliestTime()/getLatestTime() on a freshly
// created empty series is not visible here - confirm it does not throw.
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
/**
 * Appends data points to a series. The new points must start after the last
 * stored point; overlapping ranges are rejected.
 *
 * @param objectKey the object identifier, not null
 * @param series the points to append, not null
 * @return the unique identifier of the updated series, not null
 * @throws IllegalArgumentException if dates overlap or on concurrent modification
 */
@Override
public UniqueIdentifier updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectIdentifier objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
// NOTE(review): isBefore permits a new earliest date EQUAL to the stored
// latest date - confirm noIntersectionOperation handles that duplicate date.
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
// Atomic swap: fails if another thread changed the points since the get above.
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
/**
 * Corrects data points in a series. Incoming values take precedence on
 * matching dates (union with SECOND_OPERATOR); non-overlapping dates are merged.
 *
 * @param objectKey the object identifier, not null
 * @param series the corrected points, not null
 * @return the unique identifier of the corrected series, not null
 * @throws IllegalArgumentException on concurrent modification
 */
@Override
public UniqueIdentifier correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectIdentifier objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
// Atomic swap: fails if another thread changed the points since the get above.
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
/**
 * Removes the data points inside the given inclusive date range.
 * Null bounds default to a very wide range (see TODO below). If no points are
 * stored for the identifier this is a no-op.
 *
 * @param objectKey the object identifier, not null
 * @param fromDateInclusive the inclusive start date, null for unbounded
 * @param toDateInclusive the inclusive end date, null for unbounded
 * @return the unique identifier of the series, not null
 * @throws IllegalArgumentException on concurrent modification
 */
@Override
public UniqueIdentifier removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
  ArgumentChecker.notNull(objectKey, "objectKey");
  fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1));  // TODO: JSR-310 min/max date
  toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
  ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
  final ObjectIdentifier objectId = objectKey.getObjectId();
  final UniqueIdentifier uniqueId = objectId.atLatestVersion();
  final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
  if (existingSeries == null) {
    return uniqueId;  // nothing stored, nothing to remove
  }
  final MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
  for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
    final LocalDate date = it.next();
    if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
      it.remove();
    }
  }
  // Atomic swap: fails if another thread changed the points since the get above.
  if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
    throw new IllegalArgumentException("Concurrent modification");
  }
  // Fix: notify listeners of the mutation, consistent with
  // updateTimeSeriesDataPoints and correctTimeSeriesDataPoints, which both
  // fire a change event after a successful store.
  changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, Instant.now());
  return uniqueId;
}
//-------------------------------------------------------------------------
/**
 * Gets the change manager used to notify listeners of alterations.
 *
 * @return the change manager, not null
 */
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
/**
 * Validates that the object identifier is non-null and has a numeric value,
 * as produced by the identifier supplier.
 *
 * @param objectId the identifier to validate, not null
 * @return the numeric value of the identifier
 * @throws IllegalArgumentException if the value is not numeric
 */
private long validateId(ObjectIdentifiable objectId) {
  ArgumentChecker.notNull(objectId, "objectId");
  try {
    return Long.parseLong(objectId.getObjectId().getValue());
  } catch (NumberFormatException ex) {
    // Fix: chain the cause instead of dropping it, so diagnostics keep the parse failure.
    throw new IllegalArgumentException("Invalid objectId " + objectId, ex);
  }
}
/**
 * Validates a document before it is added or updated: the identifier (if set)
 * must be numeric, the info must be present, and the identifier bundle plus
 * the data source/provider/field/observation-time strings must be non-empty.
 *
 * @param document the document to validate, not null
 * @throws IllegalArgumentException if any check fails
 */
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getIdentifiers(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getIdentifiers().asIdentifierBundle().getIdentifiers().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
Right
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
 * The default scheme used for each {@link UniqueId}.
 */
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
 * A cache of time-series info documents, keyed by object identifier.
 */
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
 * A cache of time-series data points, keyed by object identifier.
 */
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
 * The supplier of object identifiers for newly added documents.
 */
private final Supplier<ObjectId> _objectIdSupplier;
/**
 * The change manager used to notify listeners of alterations.
 */
private final MasterChangeManager _changeManager;
/**
 * Creates an instance using the default identifier scheme and a default change manager.
 */
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
 * Creates an instance with the default identifier scheme, specifying the change manager.
 *
 * @param changeManager the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
 * Creates an instance with a default change manager, specifying the supplier of object identifiers.
 *
 * @param objectIdSupplier the supplier of object identifiers, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
 * Creates an instance specifying the supplier of object identifiers and change manager.
 * This is the designated constructor that the other constructors delegate to.
 *
 * @param objectIdSupplier the supplier of object identifiers, not null
 * @param changeManager the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
// Collects the distinct data fields/sources/providers/observation times
// currently stored, as selected by the request flags.
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
// Linear scan of the store, returning matching documents paged per the request.
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
// Gets by unique identifier, delegating to the latest version-correction.
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
// Gets by object identifier; only the latest version is stored, so the
// version-correction is validated but otherwise unused.
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
// Adds a clone of the document with a fresh identifier and version instants,
// then notifies the change manager.
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
// Updates in place: the caller's document is stamped with new instants and
// swapped in atomically. The document is mutated even if the swap fails.
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// Atomic swap: fails if another thread replaced the document since the get above.
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
// Removes the document, notifying the change manager.
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
// No history is kept, so correct is identical to update.
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
// Only the latest version is stored, so history contains at most one document.
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
// Gets data points by unique identifier, delegating to the latest version-correction.
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
// Returns the stored points restricted to the inclusive date range; null
// bounds default to a very wide range (see TODO below).
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
// No points stored yet: the info document must still exist for the id to be valid.
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
// NOTE(review): behaviour of getEarliestTime()/getLatestTime() on a freshly
// created empty series is not visible here - confirm it does not throw.
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
// Appends points; the new series must start after the last stored point.
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
// Atomic swap: fails if another thread changed the points since the get above.
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
// Corrects points; incoming values override stored values on matching dates.
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
// Atomic swap: fails if another thread changed the points since the get above.
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
// Removes points inside the inclusive range; a no-op when nothing is stored.
// NOTE(review): unlike the other mutating operations, no change event is
// fired here - confirm whether listeners should be notified.
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
// Atomic swap: fails if another thread changed the points since the get above.
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
// Gets the change manager used to notify listeners of alterations.
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
// Validates that the identifier is non-null and numeric, returning the value.
// NOTE(review): the NumberFormatException cause is dropped when rethrowing.
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
// Validates a document before add/update: numeric id (if set), info present,
// non-empty external id bundle, and non-blank source/provider/field/observation time.
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
 * The default scheme used for each {@link UniqueId}.
 */
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
 * A cache of time-series info documents, keyed by object identifier.
 */
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
 * A cache of time-series data points, keyed by object identifier.
 */
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
 * The supplier of object identifiers for newly added documents.
 */
private final Supplier<ObjectId> _objectIdSupplier;
/**
 * The change manager used to notify listeners of alterations.
 */
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
MergeMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
KeepBothMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
 * The default scheme used for each {@link UniqueId}.
 */
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
 * A cache of time-series info documents, keyed by object identifier.
 */
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
 * A cache of time-series data points, keyed by object identifier.
 */
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
 * The supplier of object identifiers.
 */
private final Supplier<ObjectId> _objectIdSupplier;
/**
 * The change manager.
 */
private final ChangeManager _changeManager;
/**
 * Creates an instance using the default object identifier scheme.
 */
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
 * Creates an instance specifying the change manager.
 *
 * @param changeManager  the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
  // ObjectIdentifierSupplier is a pre-refactor type that is not imported here;
  // ObjectIdSupplier is the imported replacement used by the other constructors
  this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
 * Creates an instance specifying the change manager.
 * <p>
 * NOTE(review): {@code MasterChangeManager} belongs to the pre-refactor change API and
 * is not imported by this file; this overload appears to be a merge leftover duplicating
 * the {@link ChangeManager} constructor - confirm and remove.
 *
 * @param changeManager  the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
 * Creates an instance specifying the supplier of object identifiers.
 * <p>
 * NOTE(review): {@code ObjectIdentifier} is the pre-refactor identifier type and is not
 * imported here; this overload also clashes by erasure with the
 * {@code Supplier<ObjectId>} constructor below - looks like a merge leftover, confirm
 * and remove.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
 * Creates an instance specifying the supplier of object identifiers and change manager.
 * <p>
 * NOTE(review): {@code Supplier<ObjectIdentifier>} uses the pre-refactor identifier type,
 * which is not imported, while the field assigned is declared {@code Supplier<ObjectId>};
 * this overload also clashes by erasure with the {@code Supplier<ObjectId>} two-argument
 * constructor - confirm and consolidate the two.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 * @param changeManager  the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
/**
 * Creates an instance specifying the supplier of object identifiers.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
  // BasicMasterChangeManager is the pre-refactor name and is not imported;
  // BasicChangeManager is the imported equivalent
  this(objectIdSupplier, new BasicChangeManager());
}
/**
 * Creates an instance specifying the supplier of object identifiers and change manager.
 * <p>
 * NOTE(review): {@code MasterChangeManager} belongs to the pre-refactor change API and
 * is not imported; the field assigned is declared {@link ChangeManager}. This overload
 * appears to duplicate the {@code ChangeManager} two-argument constructor - confirm and
 * consolidate.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 * @param changeManager  the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
 * Returns the distinct data fields, sources, providers and observation times
 * present in the stored documents, as selected by the request flags.
 *
 * @param request  the metadata request, not null
 * @return the populated metadata result, not null
 */
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
  ArgumentChecker.notNull(request, "request");
  final HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
  final Set<String> dataFields = new HashSet<String>();
  final Set<String> dataSources = new HashSet<String>();
  final Set<String> dataProviders = new HashSet<String>();
  final Set<String> observationTimes = new HashSet<String>();
  // single pass over the store, collecting only what the request asks for
  for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
    if (request.isDataFields()) {
      dataFields.add(doc.getInfo().getDataField());
    }
    if (request.isDataSources()) {
      dataSources.add(doc.getInfo().getDataSource());
    }
    if (request.isDataProviders()) {
      dataProviders.add(doc.getInfo().getDataProvider());
    }
    if (request.isObservationTimes()) {
      observationTimes.add(doc.getInfo().getObservationTime());
    }
  }
  // only touch the result lists for the requested categories, as before
  if (request.isDataFields()) {
    result.getDataFields().addAll(dataFields);
  }
  if (request.isDataSources()) {
    result.getDataSources().addAll(dataSources);
  }
  if (request.isDataProviders()) {
    result.getDataProviders().addAll(dataProviders);
  }
  if (request.isObservationTimes()) {
    result.getObservationTimes().addAll(observationTimes);
  }
  return result;
}
//-------------------------------------------------------------------------
/**
 * Searches the stored documents, returning those matching the request with
 * paging applied.
 *
 * @param request  the search request, not null
 * @return the paged search result, not null
 */
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
  ArgumentChecker.notNull(request, "request");
  final List<HistoricalTimeSeriesInfoDocument> matched = new ArrayList<HistoricalTimeSeriesInfoDocument>();
  for (HistoricalTimeSeriesInfoDocument candidate : _storeInfo.values()) {
    if (request.matches(candidate)) {
      matched.add(candidate);
    }
  }
  final HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
  // paging reflects the full match count while only the selected page is returned
  result.setPaging(Paging.of(request.getPagingRequest(), matched));
  result.getDocuments().addAll(request.getPagingRequest().select(matched));
  return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
// delegate to the object-id variant; only the latest version is stored
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
 * Gets the stored info document for the specified object identifier.
 * The version-correction is validated for nullness but otherwise unused:
 * an in-memory master only holds the latest version.
 *
 * @param objectKey  the object identifier, not null
 * @param versionCorrection  the version-correction, not null
 * @return the stored document, not null
 * @throws DataNotFoundException if the series is not stored
 */
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
  validateId(objectKey);
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  final ObjectId oid = objectKey.getObjectId();
  final HistoricalTimeSeriesInfoDocument stored = _storeInfo.get(oid);
  if (stored != null) {
    return stored;
  }
  throw new DataNotFoundException("Historical time-series not found: " + oid);
}
//-------------------------------------------------------------------------
/**
 * Adds a new time-series info document, allocating a fresh object identifier.
 *
 * @param document  the document to add, not null
 * @return the stored clone with identifier and instants set, not null
 */
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
// store a clone so later mutation of the caller's document cannot corrupt the store
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
// notify listeners only after the document is visible in the store
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
/**
 * Updates an existing time-series info document in place.
 *
 * @param document  the replacement document with unique identifier set, not null
 * @return the updated document, not null
 * @throws DataNotFoundException if the series is not stored
 * @throws IllegalArgumentException if a concurrent modification is detected
 */
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
// stamp the incoming document as the new latest version
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// compare-and-swap so a concurrent update of the same series is detected
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
 * Removes the time-series info document for the specified unique identifier.
 * <p>
 * NOTE(review): {@code UniqueIdentifier} is the pre-refactor identifier type and is not
 * imported by this file; this overload duplicates the {@code remove(UniqueId)} method
 * and appears to be a merge leftover - confirm and remove.
 *
 * @param uniqueId  the unique identifier to remove, not null
 * @throws DataNotFoundException if the series is not stored
 */
@Override
public void remove(final UniqueIdentifier uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
 * Removes the time-series info document for the specified unique identifier.
 *
 * @param uniqueId  the unique identifier to remove, not null
 * @throws DataNotFoundException if the series is not stored
 */
@Override
public void remove(final UniqueId uniqueId) {
  validateId(uniqueId);
  if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
    throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
  }
  // masterChanged/MasterChangedType belong to the old change API and are not imported;
  // use the ChangeManager API consistent with add() and update() in this class
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
// corrections are indistinguishable from updates in this in-memory master
return update(document);
}
//-------------------------------------------------------------------------
/**
 * Gets the history of a document. An in-memory master keeps only the latest
 * version, so the result contains at most one document.
 *
 * @param request  the history request with object identifier set, not null
 * @return the history result, not null
 */
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
  ArgumentChecker.notNull(request, "request");
  ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
  final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
  final HistoricalTimeSeriesInfoDocument latest = get(request.getObjectId(), VersionCorrection.LATEST);
  if (latest != null) {
    result.getDocuments().add(latest);
  }
  result.setPaging(Paging.of(result.getDocuments()));
  return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
// delegate to the object-id variant; only the latest version is stored
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
/**
 * Gets the stored points for a series, restricted to the inclusive date range.
 * Null bounds are treated as unbounded.
 *
 * @param objectKey  the series object identifier, not null
 * @param versionCorrection  the version-correction, validated but otherwise unused
 * @param fromDateInclusive  the start date, null for unbounded
 * @param toDateInclusive  the end date, null for unbounded
 * @return the series snapshot, not null
 * @throws DataNotFoundException if no info document exists for the identifier
 */
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
// null bounds mean "unbounded"; substitute wide sentinel dates
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
// a series with info but no points yet is valid and yields an empty result
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
// NOTE(review): getEarliestTime/getLatestTime on an empty series may fail - confirm
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
/**
 * Removes all data points of the series that fall within the inclusive date
 * range. A null bound is widened to an effectively open end.
 *
 * @param objectKey  the series identifier, not null
 * @param fromDateInclusive  the start date, null for open-ended
 * @param toDateInclusive  the end date, null for open-ended
 * @return the latest-version unique identifier of the series, not null
 */
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
  ArgumentChecker.notNull(objectKey, "objectKey");
  fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
  toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
  ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
  final ObjectId objectId = objectKey.getObjectId();
  LocalDateDoubleTimeSeries stored = _storePoints.get(objectId);
  if (stored == null) {
    // Nothing stored: removing points is a no-op.
    return objectId.atLatestVersion();
  }
  // Edit a mutable copy, then swap it back in atomically.
  MutableLocalDateDoubleTimeSeries editable = stored.toMutableLocalDateDoubleTimeSeries();
  for (Iterator<LocalDate> it = editable.timeIterator(); it.hasNext(); ) {
    LocalDate current = it.next();
    boolean inRange = !current.isBefore(fromDateInclusive) && !current.isAfter(toDateInclusive);
    if (inRange) {
      it.remove();
    }
  }
  if (!_storePoints.replace(objectId, stored, editable.toLocalDateDoubleTimeSeries())) {
    throw new IllegalArgumentException("Concurrent modification");
  }
  return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
/**
 * Gets the change manager through which entity events are published.
 *
 * @return the change manager, not null
 */
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
/**
 * Validates that the object identifier value is numeric and returns it.
 *
 * @param objectId  the identifier to validate, not null
 * @return the numeric value of the identifier
 * @throws IllegalArgumentException if the identifier value is not a valid long
 */
private long validateId(ObjectIdentifiable objectId) {
  ArgumentChecker.notNull(objectId, "objectId");
  try {
    return Long.parseLong(objectId.getObjectId().getValue());
  } catch (NumberFormatException ex) {
    // Preserve the cause so the offending value remains traceable.
    throw new IllegalArgumentException("Invalid objectId " + objectId, ex);
  }
}
/**
 * Validates that the document carries all mandatory series metadata before
 * it is stored or updated.
 *
 * @param document  the document to check, not null
 * @throws IllegalArgumentException if any mandatory field is missing or blank
 */
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
// A pre-assigned unique identifier, if any, must parse as a numeric id.
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueIdentifier uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
Safe
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
 * The default scheme used for each {@link UniqueId}.
 */
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
 * A cache of time-series info documents by object identifier.
 */
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
 * A cache of time-series data points by object identifier, stored separately
 * from the describing info documents.
 */
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
 * The supplier of object identifiers for newly added documents.
 */
private final Supplier<ObjectId> _objectIdSupplier;
/**
 * The change manager used to publish entity events.
 */
private final ChangeManager _changeManager;
/**
 * Creates an instance using the default object-identifier scheme.
 */
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
 * Creates an instance specifying the change manager.
 * <p>
 * Merge resolution: the file imports only the renamed API, so the renamed
 * parameter type (ChangeManager) is combined with the renamed supplier
 * (ObjectIdSupplier) from the two conflict sides.
 *
 * @param changeManager  the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
  this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
 * Creates an instance specifying the supplier of object identifiers.
 * <p>
 * Merge resolution: the renamed {@code Supplier<ObjectId>} parameter is
 * combined with the renamed {@code BasicChangeManager} default, matching
 * the file's imports.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
  this(objectIdSupplier, new BasicChangeManager());
}
/**
 * Creates an instance specifying the supplier of object identifiers and change manager.
 * <p>
 * Merge resolution: the two conflicted variants are collapsed into the
 * renamed-API signature ({@code Supplier<ObjectId>} + {@code ChangeManager}),
 * matching the declared fields and imports.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 * @param changeManager  the change manager, not null
 */
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final ChangeManager changeManager) {
  ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
  ArgumentChecker.notNull(changeManager, "changeManager");
  _objectIdSupplier = objectIdSupplier;
  _changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
 * Gathers the distinct data fields, sources, providers and observation times
 * across all stored documents, for each category the request enables.
 *
 * @param request  the meta-data request, not null
 * @return the populated meta-data result, not null
 */
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
// Each category scans all documents; a Set de-duplicates the values.
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
 * Searches by matching the request against every stored document, then
 * applies the request's paging to the matches.
 *
 * @param request  the search request, not null
 * @return the paged search result, not null
 */
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
/**
 * Gets a document by unique identifier, delegating to the latest view since
 * this in-memory master keeps no version history.
 *
 * @param uniqueId  the unique identifier, not null
 * @return the document, not null
 */
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier; the version-correction is validated
 * but otherwise ignored since only the latest state is stored.
 *
 * @param objectKey  the object identifier, not null
 * @param versionCorrection  the version-correction, not null
 * @return the document, not null
 * @throws DataNotFoundException if no document is stored for the identifier
 */
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
 * Adds a cloned copy of the document under a freshly supplied identifier and
 * fires an ADDED event.
 *
 * @param document  the document to add, not null
 * @return the stored clone with identifier and instants set, not null
 */
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
// Clone so later caller mutations cannot corrupt the stored state.
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
/**
 * Replaces the stored document atomically and fires an UPDATED event.
 *
 * @param document  the replacement document with unique identifier set, not null
 * @return the updated document, not null
 * @throws DataNotFoundException if no document is stored for the identifier
 * @throws IllegalArgumentException if a concurrent change is detected
 */
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
// Reset the version/correction window to start at this update.
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// Atomic compare-and-swap against the previously observed document.
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
  // Merge artifact fixed: a bad merge left two remove() overloads here. The
  // remove(UniqueIdentifier) variant and its masterChanged(MasterChangedType...) call
  // reference types this file does not import; the single correct overload below uses
  // UniqueId and ChangeManager.entityChanged(ChangeType...), matching the imports.
  //
  // Removes the time-series info document for the given identifier and fires a
  // REMOVED change event. Throws DataNotFoundException if no document exists;
  // IllegalArgumentException if the identifier value is not numeric.
  validateId(uniqueId);
  if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
    throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
  }
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
/**
 * Validates that the object identifier is non-null and that its value parses as a long.
 *
 * @param objectId  the identifier to validate, not null
 * @return the numeric value of the identifier
 * @throws IllegalArgumentException if the identifier value is not numeric
 */
private long validateId(ObjectIdentifiable objectId) {
  ArgumentChecker.notNull(objectId, "objectId");
  try {
    return Long.parseLong(objectId.getObjectId().getValue());
  } catch (NumberFormatException ex) {
    // Chain the cause so the original parse failure is not lost from the stack trace.
    throw new IllegalArgumentException("Invalid objectId " + objectId, ex);
  }
}
/**
 * Validates a time-series info document before it is stored.
 * Checks: non-null document and info; a valid numeric unique identifier when present;
 * a non-empty external identifier bundle; and non-blank data source, data provider,
 * data field and observation time.
 *
 * @param document  the document to validate, not null
 * @throws IllegalArgumentException if any check fails
 */
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
// The unique identifier is optional (absent on add), but must be numeric when supplied.
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
  // Conflict resolved: parameter type ChangeManager and supplier ObjectIdSupplier match
  // this file's imports; MasterChangeManager and ObjectIdentifierSupplier are not imported.
  this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
  // Conflict resolved: Supplier<ObjectId> matches the _objectIdSupplier field type and
  // BasicChangeManager is the imported default; the other side's types are not imported.
  this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final ChangeManager changeManager) {
  // Conflict resolved: Supplier<ObjectId> and ChangeManager match the declared field types
  // (_objectIdSupplier, _changeManager) and this file's imports.
  ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
  ArgumentChecker.notNull(changeManager, "changeManager");
  _objectIdSupplier = objectIdSupplier;
  _changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
  // Merge artifact fixed: a bad merge left two remove() overloads here. The
  // remove(UniqueIdentifier) variant and its masterChanged(MasterChangedType...) call
  // reference types this file does not import; the single correct overload below uses
  // UniqueId and ChangeManager.entityChanged(ChangeType...), matching the imports.
  //
  // Removes the time-series info document for the given identifier and fires a
  // REMOVED change event. Throws DataNotFoundException if no document exists;
  // IllegalArgumentException if the identifier value is not numeric.
  validateId(uniqueId);
  if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
    throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
  }
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
Unstructured
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
  // Conflict resolved by combining the compilable half of each side: ChangeManager (this
  // side's parameter type) with ObjectIdSupplier (the other side's supplier) — both are
  // imported; MasterChangeManager and ObjectIdentifierSupplier are not.
  this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
  // Conflict resolved by combining the compilable half of each side: Supplier<ObjectId>
  // matches the _objectIdSupplier field and BasicChangeManager is imported;
  // ObjectIdentifier and BasicMasterChangeManager are not.
  this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.historicaltimeseries.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import javax.time.calendar.LocalDate;
import org.apache.commons.lang.StringUtils;
import org.joda.beans.JodaBeanUtils;
import com.google.common.base.Objects;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoDocument;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoHistoryResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoMetaDataResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchRequest;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesInfoSearchResult;
import com.opengamma.master.historicaltimeseries.HistoricalTimeSeriesMaster;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeries;
import com.opengamma.master.historicaltimeseries.ManageableHistoricalTimeSeriesInfo;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
import com.opengamma.util.timeseries.DoubleTimeSeriesOperators;
import com.opengamma.util.timeseries.localdate.ArrayLocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.LocalDateDoubleTimeSeries;
import com.opengamma.util.timeseries.localdate.MutableLocalDateDoubleTimeSeries;
/**
* An in-memory implementation of a historical time-series master.
*/
public class InMemoryHistoricalTimeSeriesMaster implements HistoricalTimeSeriesMaster {
/**
* The default scheme used for each {@link UniqueId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemHts";
/**
* A cache of time-series info by identifier.
*/
private final ConcurrentMap<ObjectId, HistoricalTimeSeriesInfoDocument> _storeInfo = new ConcurrentHashMap<ObjectId, HistoricalTimeSeriesInfoDocument>();
/**
* A cache of time-series points by identifier.
*/
private final ConcurrentMap<ObjectId, LocalDateDoubleTimeSeries> _storePoints = new ConcurrentHashMap<ObjectId, LocalDateDoubleTimeSeries>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemoryHistoricalTimeSeriesMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryHistoricalTimeSeriesMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemoryHistoricalTimeSeriesMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
=======
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemoryHistoricalTimeSeriesMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoMetaDataResult metaData(HistoricalTimeSeriesInfoMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
HistoricalTimeSeriesInfoMetaDataResult result = new HistoricalTimeSeriesInfoMetaDataResult();
if (request.isDataFields()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataField());
}
result.getDataFields().addAll(types);
}
if (request.isDataSources()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataSource());
}
result.getDataSources().addAll(types);
}
if (request.isDataProviders()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getDataProvider());
}
result.getDataProviders().addAll(types);
}
if (request.isObservationTimes()) {
Set<String> types = new HashSet<String>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
types.add(doc.getInfo().getObservationTime());
}
result.getObservationTimes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoSearchResult search(HistoricalTimeSeriesInfoSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<HistoricalTimeSeriesInfoDocument> list = new ArrayList<HistoricalTimeSeriesInfoDocument>();
for (HistoricalTimeSeriesInfoDocument doc : _storeInfo.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
HistoricalTimeSeriesInfoSearchResult result = new HistoricalTimeSeriesInfoSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument get(final ObjectIdentifiable objectKey, VersionCorrection versionCorrection) {
validateId(objectKey);
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final ObjectId objectId = objectKey.getObjectId();
final HistoricalTimeSeriesInfoDocument document = _storeInfo.get(objectId);
if (document == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument add(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final HistoricalTimeSeriesInfoDocument cloned = JodaBeanUtils.clone(document);
final ManageableHistoricalTimeSeriesInfo info = cloned.getInfo();
info.setUniqueId(uniqueId);
final Instant now = Instant.now();
cloned.setVersionFromInstant(now);
cloned.setCorrectionFromInstant(now);
cloned.getInfo().setTimeSeriesObjectId(objectId);
_storeInfo.put(objectId, cloned);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return cloned;
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument update(final HistoricalTimeSeriesInfoDocument document) {
validateDocument(document);
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final HistoricalTimeSeriesInfoDocument storedDocument = _storeInfo.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_storeInfo.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
validateId(uniqueId);
if (_storeInfo.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Historical time-series not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoDocument correct(final HistoricalTimeSeriesInfoDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public HistoricalTimeSeriesInfoHistoryResult history(HistoricalTimeSeriesInfoHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final HistoricalTimeSeriesInfoHistoryResult result = new HistoricalTimeSeriesInfoHistoryResult();
final HistoricalTimeSeriesInfoDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(UniqueId uniqueId, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
return getTimeSeries(uniqueId.getObjectId(), VersionCorrection.LATEST, fromDateInclusive, toDateInclusive);
}
//-------------------------------------------------------------------------
@Override
public ManageableHistoricalTimeSeries getTimeSeries(ObjectIdentifiable objectKey, VersionCorrection versionCorrection, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
validateId(objectKey);
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
final Instant now = Instant.now();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
if (_storeInfo.get(objectId) == null) {
throw new DataNotFoundException("Historical time-series not found: " + objectId);
}
existingSeries = new ArrayLocalDateDoubleTimeSeries();
}
final LocalDateDoubleTimeSeries subSeries = existingSeries.subSeries(fromDateInclusive, toDateInclusive).toLocalDateDoubleTimeSeries();
final ManageableHistoricalTimeSeries result = new ManageableHistoricalTimeSeries();
result.setUniqueId(objectId.atLatestVersion());
result.setTimeSeries(subSeries);
result.setEarliest(existingSeries.getEarliestTime());
result.setLatest(existingSeries.getLatestTime());
result.setVersionInstant(now);
result.setCorrectionInstant(now);
return result;
}
//-------------------------------------------------------------------------
@Override
public UniqueId updateTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
final LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
if (series.getEarliestTime().isBefore(existingSeries.getLatestTime())) {
throw new IllegalArgumentException("Unable to add time-series as dates overlap");
}
LocalDateDoubleTimeSeries newSeries = existingSeries.noIntersectionOperation(series).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.UPDATED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.UPDATED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId correctTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDateDoubleTimeSeries series) {
ArgumentChecker.notNull(objectKey, "objectKey");
ArgumentChecker.notNull(series, "series");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries != null) {
LocalDateDoubleTimeSeries newSeries = existingSeries.unionOperate(series, DoubleTimeSeriesOperators.SECOND_OPERATOR).toLocalDateDoubleTimeSeries();
if (_storePoints.replace(objectId, existingSeries, newSeries) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
} else {
if (_storePoints.putIfAbsent(objectId, series) != null) {
throw new IllegalArgumentException("Concurrent modification");
}
}
final Instant now = Instant.now();
<<<<<<< MINE
final UniqueIdentifier uniqueId = objectId.atLatestVersion();
changeManager().entityChanged(ChangeType.CORRECTED, uniqueId, uniqueId, now);
=======
final UniqueId uniqueId = objectId.atLatestVersion();
changeManager().masterChanged(MasterChangedType.CORRECTED, uniqueId, uniqueId, now);
>>>>>>> YOURS
return uniqueId;
}
//-------------------------------------------------------------------------
@Override
public UniqueId removeTimeSeriesDataPoints(ObjectIdentifiable objectKey, LocalDate fromDateInclusive, LocalDate toDateInclusive) {
ArgumentChecker.notNull(objectKey, "objectKey");
fromDateInclusive = Objects.firstNonNull(fromDateInclusive, LocalDate.of(1000, 1, 1)); // TODO: JSR-310 min/max date
toDateInclusive = Objects.firstNonNull(toDateInclusive, LocalDate.of(9999, 1, 1));
ArgumentChecker.inOrderOrEqual(fromDateInclusive, toDateInclusive, "fromDateInclusive", "toDateInclusive");
final ObjectId objectId = objectKey.getObjectId();
LocalDateDoubleTimeSeries existingSeries = _storePoints.get(objectId);
if (existingSeries == null) {
return objectId.atLatestVersion();
}
MutableLocalDateDoubleTimeSeries mutableTS = existingSeries.toMutableLocalDateDoubleTimeSeries();
for (Iterator<LocalDate> it = mutableTS.timeIterator(); it.hasNext(); ) {
LocalDate date = it.next();
if (date.isBefore(fromDateInclusive) == false && date.isAfter(toDateInclusive) == false) {
it.remove();
}
}
if (_storePoints.replace(objectId, existingSeries, mutableTS.toLocalDateDoubleTimeSeries()) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
return objectId.atLatestVersion();
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
//-------------------------------------------------------------------------
private long validateId(ObjectIdentifiable objectId) {
ArgumentChecker.notNull(objectId, "objectId");
try {
return Long.parseLong(objectId.getObjectId().getValue());
} catch (NumberFormatException ex) {
throw new IllegalArgumentException("Invalid objectId " + objectId);
}
}
private void validateDocument(HistoricalTimeSeriesInfoDocument document) {
ArgumentChecker.notNull(document, "document");
if (document.getUniqueId() != null) {
validateId(document.getUniqueId());
}
ArgumentChecker.notNull(document.getInfo(), "document.series");
ArgumentChecker.notNull(document.getInfo().getExternalIdBundle(), "document.series.identifiers");
ArgumentChecker.isTrue(document.getInfo().getExternalIdBundle().toBundle().getExternalIds().size() > 0, "document.series.identifiers must not be empty");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataSource()), "document.series.dataSource must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataProvider()), "document.series.dataProvider must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getDataField()), "document.series.dataField must not be blank");
ArgumentChecker.isTrue(StringUtils.isNotBlank(document.getInfo().getObservationTime()), "document.series.observationTime must not be blank");
}
}
Diff Result
No diff
Case 15 - ogplatform.rev_412e2_f4b6f.InMemorySecurityMaster.java
public InMemorySecurityMaster(final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified body: type ObjectIdentifierSupplier → ObjectIdSupplier
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemorySecurityMaster(final Supplier objectIdSupplier)
Left modified body: type BasicMasterChangeManager → BasicChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemorySecurityMaster(final Supplier objectIdSupplier, final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict only on signature
Safe reported conflict of whole constructor
MergeMethods reported conflict of whole constructor
KeepBothMethods kept both versions
void remove
Left modified body
Right modified signature: type UniqueIdentifier → UniqueId
Unstructured merged changes: new signature and new body
Safe kept both versions
MergeMethods merged changes: new signature and new body
KeepBothMethods kept both versions
Base
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
 * A simple, in-memory implementation of {@code SecurityMaster}.
 * <p>
 * This security master does not support versioning of securities.
 */
public class InMemorySecurityMaster implements SecurityMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object

  /**
   * The default scheme used for each {@link ObjectIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSec";
  /**
   * A cache of securities by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, SecurityDocument> _store = new ConcurrentHashMap<ObjectIdentifier, SecurityDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager used to broadcast master events.
   */
  private final MasterChangeManager _changeManager;

  /**
   * Creates an instance using the default object identifier scheme.
   */
  public InMemorySecurityMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final MasterChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicMasterChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  /**
   * Returns meta-data about this master; currently only the set of stored security types.
   */
  @Override
  public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    SecurityMetaDataResult result = new SecurityMetaDataResult();
    if (request.isSecurityTypes()) {
      // collect the distinct security types across all stored documents
      Set<String> types = new HashSet<String>();
      for (SecurityDocument doc : _store.values()) {
        types.add(doc.getSecurity().getSecurityType());
      }
      result.getSecurityTypes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Searches for securities matching the request, applying paging to the result.
   */
  @Override
  public SecuritySearchResult search(final SecuritySearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
    for (SecurityDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    SecuritySearchResult result = new SecuritySearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets a document by unique identifier; only the latest version is available.
   */
  @Override
  public SecurityDocument get(final UniqueIdentifier uniqueId) {
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  /**
   * Gets a document by object identifier.
   * The version-correction is null-checked but otherwise unused, as this master is unversioned.
   */
  @Override
  public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    final SecurityDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Security not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  /**
   * Adds a security, allocating a new object identifier and stamping version/correction instants.
   */
  @Override
  public SecurityDocument add(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    final ManageableSecurity security = document.getSecurity();
    security.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final SecurityDocument doc = new SecurityDocument(security);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  /**
   * Updates an existing security, replacing the stored document in place.
   */
  @Override
  public SecurityDocument update(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // atomic compare-and-replace detects a concurrent update of the same document
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  /**
   * Removes a security by unique identifier.
   */
  @Override
  public void remove(final UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    _changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  /**
   * Corrects a security; equivalent to update as this master is unversioned.
   */
  @Override
  public SecurityDocument correct(final SecurityDocument document) {
    return update(document);
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the history of a security; only the latest version is ever returned.
   */
  @Override
  public SecurityHistoryResult history(final SecurityHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    final SecurityHistoryResult result = new SecurityHistoryResult();
    final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the change manager.
   */
  @Override
  public MasterChangeManager changeManager() {
    return _changeManager;
  }

}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
 * A simple, in-memory implementation of {@code SecurityMaster}.
 * <p>
 * This security master does not support versioning of securities.
 */
public class InMemorySecurityMaster implements SecurityMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object

  /**
   * The default scheme used for each {@link ObjectIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSec";
  /**
   * A cache of securities by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, SecurityDocument> _store = new ConcurrentHashMap<ObjectIdentifier, SecurityDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager used to broadcast master events.
   */
  private final MasterChangeManager _changeManager;

  /**
   * Creates an instance using the default object identifier scheme.
   */
  public InMemorySecurityMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final MasterChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicMasterChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  /**
   * Returns meta-data about this master; currently only the set of stored security types.
   */
  @Override
  public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    SecurityMetaDataResult result = new SecurityMetaDataResult();
    if (request.isSecurityTypes()) {
      // collect the distinct security types across all stored documents
      Set<String> types = new HashSet<String>();
      for (SecurityDocument doc : _store.values()) {
        types.add(doc.getSecurity().getSecurityType());
      }
      result.getSecurityTypes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Searches for securities matching the request, applying paging to the result.
   */
  @Override
  public SecuritySearchResult search(final SecuritySearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
    for (SecurityDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    SecuritySearchResult result = new SecuritySearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets a document by unique identifier; only the latest version is available.
   */
  @Override
  public SecurityDocument get(final UniqueIdentifier uniqueId) {
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  /**
   * Gets a document by object identifier.
   * The version-correction is null-checked but otherwise unused, as this master is unversioned.
   */
  @Override
  public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    final SecurityDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Security not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  /**
   * Adds a security, allocating a new object identifier and stamping version/correction instants.
   */
  @Override
  public SecurityDocument add(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    final ManageableSecurity security = document.getSecurity();
    security.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final SecurityDocument doc = new SecurityDocument(security);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  /**
   * Updates an existing security, replacing the stored document in place.
   */
  @Override
  public SecurityDocument update(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // atomic compare-and-replace detects a concurrent update of the same document
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  /**
   * Removes a security by unique identifier.
   */
  @Override
  public void remove(final UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    _changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  /**
   * Corrects a security; equivalent to update as this master is unversioned.
   */
  @Override
  public SecurityDocument correct(final SecurityDocument document) {
    return update(document);
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the history of a security; only the latest version is ever returned.
   */
  @Override
  public SecurityHistoryResult history(final SecurityHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    final SecurityHistoryResult result = new SecurityHistoryResult();
    final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the change manager.
   */
  @Override
  public MasterChangeManager changeManager() {
    return _changeManager;
  }

}
Left
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
 * A simple, in-memory implementation of {@code SecurityMaster}.
 * <p>
 * This security master does not support versioning of securities.
 */
public class InMemorySecurityMaster implements SecurityMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object

  /**
   * The default scheme used for each {@link ObjectIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSec";
  /**
   * A cache of securities by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, SecurityDocument> _store = new ConcurrentHashMap<ObjectIdentifier, SecurityDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager used to broadcast entity events.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance using the default object identifier scheme.
   */
  public InMemorySecurityMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final ChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  /**
   * Returns meta-data about this master; currently only the set of stored security types.
   */
  @Override
  public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    SecurityMetaDataResult result = new SecurityMetaDataResult();
    if (request.isSecurityTypes()) {
      // collect the distinct security types across all stored documents
      Set<String> types = new HashSet<String>();
      for (SecurityDocument doc : _store.values()) {
        types.add(doc.getSecurity().getSecurityType());
      }
      result.getSecurityTypes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Searches for securities matching the request, applying paging to the result.
   */
  @Override
  public SecuritySearchResult search(final SecuritySearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
    for (SecurityDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    SecuritySearchResult result = new SecuritySearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets a document by unique identifier; only the latest version is available.
   */
  @Override
  public SecurityDocument get(final UniqueIdentifier uniqueId) {
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  /**
   * Gets a document by object identifier.
   * The version-correction is null-checked but otherwise unused, as this master is unversioned.
   */
  @Override
  public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    final SecurityDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Security not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  /**
   * Adds a security, allocating a new object identifier and stamping version/correction instants.
   */
  @Override
  public SecurityDocument add(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    final ManageableSecurity security = document.getSecurity();
    security.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final SecurityDocument doc = new SecurityDocument(security);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  /**
   * Updates an existing security, replacing the stored document in place.
   */
  @Override
  public SecurityDocument update(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // atomic compare-and-replace detects a concurrent update of the same document
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  /**
   * Removes a security by unique identifier.
   */
  @Override
  public void remove(final UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  /**
   * Corrects a security; equivalent to update as this master is unversioned.
   */
  @Override
  public SecurityDocument correct(final SecurityDocument document) {
    return update(document);
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the history of a security; only the latest version is ever returned.
   */
  @Override
  public SecurityHistoryResult history(final SecurityHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    final SecurityHistoryResult result = new SecurityHistoryResult();
    final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the change manager.
   */
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }

}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
 * The default scheme used for each {@link ObjectIdentifier}.
 */
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
 * A cache of securities by identifier.
 */
private final ConcurrentMap<ObjectIdentifier, SecurityDocument> _store = new ConcurrentHashMap<ObjectIdentifier, SecurityDocument>();
/**
 * The supplier of identifiers.
 */
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
 * The change manager used to broadcast entity events.
 */
private final ChangeManager _changeManager;
/**
 * Creates an instance using the default object identifier scheme.
 */
public InMemorySecurityMaster() {
  this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
}
/**
 * Creates an instance specifying the change manager.
 *
 * @param changeManager  the change manager, not null
 */
public InMemorySecurityMaster(final ChangeManager changeManager) {
  this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
 * Creates an instance specifying the supplier of object identifiers.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 */
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
  this(objectIdSupplier, new BasicChangeManager());
}
/**
 * Creates an instance specifying the supplier of object identifiers and change manager.
 *
 * @param objectIdSupplier  the supplier of object identifiers, not null
 * @param changeManager  the change manager, not null
 */
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
  ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
  ArgumentChecker.notNull(changeManager, "changeManager");
  _objectIdSupplier = objectIdSupplier;
  _changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
 * Returns meta-data about this master; currently only the set of stored security types.
 */
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
  ArgumentChecker.notNull(request, "request");
  SecurityMetaDataResult result = new SecurityMetaDataResult();
  if (request.isSecurityTypes()) {
    // collect the distinct security types across all stored documents
    Set<String> types = new HashSet<String>();
    for (SecurityDocument doc : _store.values()) {
      types.add(doc.getSecurity().getSecurityType());
    }
    result.getSecurityTypes().addAll(types);
  }
  return result;
}
//-------------------------------------------------------------------------
/**
 * Searches for securities matching the request, applying paging to the result.
 */
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
  ArgumentChecker.notNull(request, "request");
  final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
  for (SecurityDocument doc : _store.values()) {
    if (request.matches(doc)) {
      list.add(doc);
    }
  }
  SecuritySearchResult result = new SecuritySearchResult();
  result.setPaging(Paging.of(request.getPagingRequest(), list));
  result.getDocuments().addAll(request.getPagingRequest().select(list));
  return result;
}
//-------------------------------------------------------------------------
/**
 * Gets a document by unique identifier; only the latest version is available.
 */
@Override
public SecurityDocument get(final UniqueIdentifier uniqueId) {
  return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
 * Gets a document by object identifier.
 * The version-correction is null-checked but otherwise unused, as this master is unversioned.
 */
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
  ArgumentChecker.notNull(objectId, "objectId");
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  final SecurityDocument document = _store.get(objectId.getObjectId());
  if (document == null) {
    throw new DataNotFoundException("Security not found: " + objectId);
  }
  return document;
}
//-------------------------------------------------------------------------
/**
 * Adds a security, allocating a new object identifier and stamping version/correction instants.
 */
@Override
public SecurityDocument add(final SecurityDocument document) {
  ArgumentChecker.notNull(document, "document");
  ArgumentChecker.notNull(document.getSecurity(), "document.security");
  final ObjectIdentifier objectId = _objectIdSupplier.get();
  final UniqueIdentifier uniqueId = objectId.atVersion("");
  final ManageableSecurity security = document.getSecurity();
  security.setUniqueId(uniqueId);
  final Instant now = Instant.now();
  final SecurityDocument doc = new SecurityDocument(security);
  doc.setVersionFromInstant(now);
  doc.setCorrectionFromInstant(now);
  _store.put(objectId, doc);
  _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
  return doc;
}
//-------------------------------------------------------------------------
/**
 * Updates an existing security, replacing the stored document in place.
 */
@Override
public SecurityDocument update(final SecurityDocument document) {
  ArgumentChecker.notNull(document, "document");
  ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
  ArgumentChecker.notNull(document.getSecurity(), "document.security");
  final UniqueIdentifier uniqueId = document.getUniqueId();
  final Instant now = Instant.now();
  final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
  if (storedDocument == null) {
    throw new DataNotFoundException("Security not found: " + uniqueId);
  }
  document.setVersionFromInstant(now);
  document.setVersionToInstant(null);
  document.setCorrectionFromInstant(now);
  document.setCorrectionToInstant(null);
  // atomic compare-and-replace detects a concurrent update of the same document
  if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
    throw new IllegalArgumentException("Concurrent modification");
  }
  _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
  return document;
}
//-------------------------------------------------------------------------
/**
* Removes the document for the identifier, firing a REMOVED event.
*
* @param uniqueId the unique identifier to remove, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public void remove(final UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
* Corrects a document; identical to {@link #update} as no version history is kept.
*
* @param document the document to correct, not null
* @return the corrected document, not null
*/
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
* Gets the history of a security; at most the single latest document, as
* this master stores no earlier versions.
*
* @param request the history request, not null
* @return the history result, not null
*/
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final SecurityHistoryResult result = new SecurityHistoryResult();
final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
// get throws DataNotFoundException rather than returning null;
// the null check is kept defensively
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets the change manager that fires events for this master.
*
* @return the change manager, not null
*/
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
Right
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySecurityMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
* Returns meta-data about the stored securities; only the distinct set of
* security types is supported, and only when the request asks for it.
*
* @param request the meta-data request, not null
* @return the meta-data result, not null
*/
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
* Searches all stored documents with a linear scan, applying the request's
* paging to the matches.
*
* @param request the search request, not null
* @return the paged search result, not null
*/
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier at the latest version-correction.
*
* @param uniqueId the unique identifier, not null
* @return the matching document, not null
*/
@Override
public SecurityDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier.
*
* @param objectId the object identifier, not null
* @param versionCorrection validated but otherwise ignored, as this master
* does not version securities, not null
* @return the matching document, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final SecurityDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Security not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
* Adds a security, allocating a fresh object identifier and firing an ADDED event.
*
* @param document the document to add, not null
* @return the stored document, with identifier and instants set, not null
*/
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final ObjectId objectId = _objectIdSupplier.get();
// empty version string: this master keeps no version history
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
// mutates the caller-supplied security to carry its new identifier
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
/**
* Updates (replaces) the stored document, firing an UPDATED event.
*
* @param document the document to update, with unique identifier set, not null
* @return the updated document, not null
* @throws DataNotFoundException if no document exists for the identifier
* @throws IllegalArgumentException if the document was concurrently modified
*/
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// atomic compare-and-set: fails if another thread replaced the document
// between the get above and this call
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
* Removes the document for the identifier, firing a REMOVED event.
*
* @param uniqueId the unique identifier to remove, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public void remove(final UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
* Corrects a document; identical to {@link #update} as no version history is kept.
*
* @param document the document to correct, not null
* @return the corrected document, not null
*/
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
* Gets the history of a security; at most the single latest document, as
* this master stores no earlier versions.
*
* @param request the history request, not null
* @return the history result, not null
*/
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final SecurityHistoryResult result = new SecurityHistoryResult();
final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
// get throws DataNotFoundException rather than returning null;
// the null check is kept defensively
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets the change manager that fires events for this master.
*
* @return the change manager, not null
*/
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySecurityMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
/**
* Returns meta-data about the stored securities; only the distinct set of
* security types is supported, and only when the request asks for it.
*
* @param request the meta-data request, not null
* @return the meta-data result, not null
*/
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
* Searches all stored documents with a linear scan, applying the request's
* paging to the matches.
*
* @param request the search request, not null
* @return the paged search result, not null
*/
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier at the latest version-correction.
*
* @param uniqueId the unique identifier, not null
* @return the matching document, not null
*/
@Override
public SecurityDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier.
*
* @param objectId the object identifier, not null
* @param versionCorrection validated but otherwise ignored, as this master
* does not version securities, not null
* @return the matching document, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final SecurityDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Security not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
* Adds a security, allocating a fresh object identifier and firing an ADDED event.
*
* @param document the document to add, not null
* @return the stored document, with identifier and instants set, not null
*/
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final ObjectId objectId = _objectIdSupplier.get();
// empty version string: this master keeps no version history
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
// mutates the caller-supplied security to carry its new identifier
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
/**
* Updates (replaces) the stored document, firing an UPDATED event.
*
* @param document the document to update, with unique identifier set, not null
* @return the updated document, not null
* @throws DataNotFoundException if no document exists for the identifier
* @throws IllegalArgumentException if the document was concurrently modified
*/
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// atomic compare-and-set: fails if another thread replaced the document
// between the get above and this call
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
* Removes the document for the identifier, firing a REMOVED event.
*
* @param uniqueId the unique identifier to remove, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public void remove(final UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
* Corrects a document; identical to {@link #update} as no version history is kept.
*
* @param document the document to correct, not null
* @return the corrected document, not null
*/
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
* Gets the history of a security; at most the single latest document, as
* this master stores no earlier versions.
*
* @param request the history request, not null
* @return the history result, not null
*/
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final SecurityHistoryResult result = new SecurityHistoryResult();
final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
// get throws DataNotFoundException rather than returning null;
// the null check is kept defensively
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets the change manager that fires events for this master.
*
* @return the change manager, not null
*/
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
}
MergeMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySecurityMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
// NOTE(review): unresolved merge-conflict markers preserved verbatim below
// (this is recorded merge-tool output); the class cannot compile until the
// conflict is resolved.
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
/**
* Returns meta-data about the stored securities; only the distinct set of
* security types is supported, and only when the request asks for it.
*
* @param request the meta-data request, not null
* @return the meta-data result, not null
*/
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
/**
* Searches all stored documents with a linear scan, applying the request's
* paging to the matches.
*
* @param request the search request, not null
* @return the paged search result, not null
*/
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
/**
* Gets a document by unique identifier at the latest version-correction.
*
* @param uniqueId the unique identifier, not null
* @return the matching document, not null
*/
@Override
public SecurityDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
/**
* Gets a document by object identifier.
*
* @param objectId the object identifier, not null
* @param versionCorrection validated but otherwise ignored, as this master
* does not version securities, not null
* @return the matching document, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final SecurityDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Security not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
/**
* Adds a security, allocating a fresh object identifier and firing an ADDED event.
*
* @param document the document to add, not null
* @return the stored document, with identifier and instants set, not null
*/
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final ObjectId objectId = _objectIdSupplier.get();
// empty version string: this master keeps no version history
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
// mutates the caller-supplied security to carry its new identifier
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
/**
* Updates (replaces) the stored document, firing an UPDATED event.
*
* @param document the document to update, with unique identifier set, not null
* @return the updated document, not null
* @throws DataNotFoundException if no document exists for the identifier
* @throws IllegalArgumentException if the document was concurrently modified
*/
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// atomic compare-and-set: fails if another thread replaced the document
// between the get above and this call
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
/**
* Removes the document for the identifier, firing a REMOVED event.
*
* @param uniqueId the unique identifier to remove, not null
* @throws DataNotFoundException if no document exists for the identifier
*/
@Override
public void remove(final UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
/**
* Corrects a document; identical to {@link #update} as no version history is kept.
*
* @param document the document to correct, not null
* @return the corrected document, not null
*/
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
/**
* Gets the history of a security; at most the single latest document, as
* this master stores no earlier versions.
*
* @param request the history request, not null
* @return the history result, not null
*/
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final SecurityHistoryResult result = new SecurityHistoryResult();
final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
// get throws DataNotFoundException rather than returning null;
// the null check is kept defensively
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
/**
* Gets the change manager that fires events for this master.
*
* @return the change manager, not null
*/
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySecurityMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final UniqueId uniqueId) {
// This master keeps no version history, so the latest version-correction always applies.
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
  ArgumentChecker.notNull(objectId, "objectId");
  ArgumentChecker.notNull(versionCorrection, "versionCorrection");
  // versionCorrection is validated but otherwise ignored: this master is unversioned
  final SecurityDocument stored = _store.get(objectId.getObjectId());
  if (stored == null) {
    throw new DataNotFoundException("Security not found: " + objectId);
  }
  return stored;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
// allocate a fresh object identifier and derive the unique identifier (empty version) from it
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
// wrap in a new document so the stored instant metadata is fully controlled here
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
// broadcast the addition to registered change listeners
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
// stamp the incoming document as the new, open-ended latest version/correction
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
// compare-and-set: fails if another thread replaced the document since the get() above
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
// NOTE(review): both identifier arguments come from the same uniqueId here — presumably
// intentional as this master does not mint new version identifiers; confirm against callers.
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
  ArgumentChecker.notNull(uniqueId, "uniqueId");
  // remove() returns the previous mapping, or null when nothing was stored under that id
  final SecurityDocument removed = _store.remove(uniqueId.getObjectId());
  if (removed == null) {
    throw new DataNotFoundException("Security not found: " + uniqueId);
  }
  _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument correct(final SecurityDocument document) {
// Without versioning, a correction is indistinguishable from an update.
return update(document);
}
//-------------------------------------------------------------------------
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
  ArgumentChecker.notNull(request, "request");
  ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
  // an unversioned master has at most one "historic" entry: the current document
  final SecurityHistoryResult result = new SecurityHistoryResult();
  final SecurityDocument current = get(request.getObjectId(), VersionCorrection.LATEST);
  if (current != null) {
    result.getDocuments().add(current);
  }
  result.setPaging(Paging.of(result.getDocuments()));
  return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
// Exposes the manager used to broadcast add/update/remove events.
return _changeManager;
}
}
KeepBothMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.
  //
  // Review note: the previous version of this class kept both sides of a merge,
  // leaving undefined types (ObjectIdentifierSupplier, MasterChangeManager,
  // BasicMasterChangeManager, UniqueIdentifier, MasterChangedType), single-argument
  // constructors clashing by generic erasure, and a duplicate remove() overload.
  // This version keeps only the members consistent with the imported ObjectId /
  // ChangeManager API; the removed members referenced types that did not resolve,
  // so no compiling caller is affected.

  /**
   * The default scheme used for each {@link ObjectId}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSec";

  /**
   * A cache of securities by object identifier.
   */
  private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
  /**
   * The supplier of object identifiers.
   */
  private final Supplier<ObjectId> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance using the default scheme for identifiers.
   */
  public InMemorySecurityMaster() {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final ChangeManager changeManager) {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    SecurityMetaDataResult result = new SecurityMetaDataResult();
    if (request.isSecurityTypes()) {
      // gather the distinct security types across every stored document
      Set<String> types = new HashSet<String>();
      for (SecurityDocument doc : _store.values()) {
        types.add(doc.getSecurity().getSecurityType());
      }
      result.getSecurityTypes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecuritySearchResult search(final SecuritySearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
    for (SecurityDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    SecuritySearchResult result = new SecuritySearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument get(final UniqueId uniqueId) {
    // no versioning is supported, so the latest version-correction always applies
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    final SecurityDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Security not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument add(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    // allocate a fresh object identifier and derive the unique identifier from it
    final ObjectId objectId = _objectIdSupplier.get();
    final UniqueId uniqueId = objectId.atVersion("");
    final ManageableSecurity security = document.getSecurity();
    security.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final SecurityDocument doc = new SecurityDocument(security);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument update(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final UniqueId uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // compare-and-set guards against a concurrent update between the get() and replace()
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(final UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument correct(final SecurityDocument document) {
    // without versioning, a correction is indistinguishable from an update
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityHistoryResult history(final SecurityHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    final SecurityHistoryResult result = new SecurityHistoryResult();
    final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.
  //
  // Review note: the previous version of this class kept both sides of a merge,
  // leaving undefined types (ObjectIdentifierSupplier, MasterChangeManager,
  // BasicMasterChangeManager, UniqueIdentifier, MasterChangedType), single-argument
  // constructors clashing by generic erasure, and a duplicate remove() overload.
  // This version keeps only the members consistent with the imported ObjectId /
  // ChangeManager API; the removed members referenced types that did not resolve,
  // so no compiling caller is affected.

  /**
   * The default scheme used for each {@link ObjectId}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSec";

  /**
   * A cache of securities by object identifier.
   */
  private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
  /**
   * The supplier of object identifiers.
   */
  private final Supplier<ObjectId> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance using the default scheme for identifiers.
   */
  public InMemorySecurityMaster() {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final ChangeManager changeManager) {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    SecurityMetaDataResult result = new SecurityMetaDataResult();
    if (request.isSecurityTypes()) {
      // gather the distinct security types across every stored document
      Set<String> types = new HashSet<String>();
      for (SecurityDocument doc : _store.values()) {
        types.add(doc.getSecurity().getSecurityType());
      }
      result.getSecurityTypes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecuritySearchResult search(final SecuritySearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
    for (SecurityDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    SecuritySearchResult result = new SecuritySearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument get(final UniqueId uniqueId) {
    // no versioning is supported, so the latest version-correction always applies
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    final SecurityDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Security not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument add(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    // allocate a fresh object identifier and derive the unique identifier from it
    final ObjectId objectId = _objectIdSupplier.get();
    final UniqueId uniqueId = objectId.atVersion("");
    final ManageableSecurity security = document.getSecurity();
    security.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final SecurityDocument doc = new SecurityDocument(security);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument update(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final UniqueId uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // compare-and-set guards against a concurrent update between the get() and replace()
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(final UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument correct(final SecurityDocument document) {
    // without versioning, a correction is indistinguishable from an update
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityHistoryResult history(final SecurityHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    final SecurityHistoryResult result = new SecurityHistoryResult();
    final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }
}
Safe
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySecurityMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
=======
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final SecurityDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Security not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
  ArgumentChecker.notNull(request, "request");
  ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
  // Only the latest version is ever stored, so "history" is at most one entry.
  final SecurityDocument latest = get(request.getObjectId(), VersionCorrection.LATEST);
  final SecurityHistoryResult result = new SecurityHistoryResult();
  if (latest != null) {
    result.getDocuments().add(latest);
  }
  result.setPaging(Paging.of(result.getDocuments()));
  return result;
}
//-------------------------------------------------------------------------
// Returns the change manager through which this master publishes its
// add/update/remove events.
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySecurityMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
=======
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final SecurityDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Security not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final SecurityHistoryResult result = new SecurityHistoryResult();
final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
Unstructured
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object

  /**
   * The default scheme used for each {@link ObjectId}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSec";

  /**
   * A cache of securities by identifier.
   */
  private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectId> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance.
   */
  public InMemorySecurityMaster() {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final ChangeManager changeManager) {
    // Merge resolution: ChangeManager (renamed listener API) combined with
    // ObjectIdSupplier (renamed supplier); only these two types are imported.
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
    ArgumentChecker.notNull(request, "request");
    SecurityMetaDataResult result = new SecurityMetaDataResult();
    if (request.isSecurityTypes()) {
      // De-duplicate the security types across all stored documents.
      Set<String> types = new HashSet<String>();
      for (SecurityDocument doc : _store.values()) {
        types.add(doc.getSecurity().getSecurityType());
      }
      result.getSecurityTypes().addAll(types);
    }
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecuritySearchResult search(final SecuritySearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    // Linear scan of the store, filtered by the request, then paged.
    final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
    for (SecurityDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    SecuritySearchResult result = new SecuritySearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument get(final UniqueId uniqueId) {
    return get(uniqueId, VersionCorrection.LATEST);
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // Versioning is unsupported: the version-correction is validated but ignored.
    final SecurityDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Security not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument add(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final ObjectId objectId = _objectIdSupplier.get();
    final UniqueId uniqueId = objectId.atVersion("");
    final ManageableSecurity security = document.getSecurity();
    security.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final SecurityDocument doc = new SecurityDocument(security);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument update(final SecurityDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSecurity(), "document.security");
    final UniqueId uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // Atomic compare-and-swap guards against a concurrent update of the same security.
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(final UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      throw new DataNotFoundException("Security not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityDocument correct(final SecurityDocument document) {
    // This master does not version, so a correction is identical to an update.
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public SecurityHistoryResult history(final SecurityHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    final SecurityHistoryResult result = new SecurityHistoryResult();
    // Only the latest version exists, so history contains at most one document.
    final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
    if (doc != null) {
      result.getDocuments().add(doc);
    }
    result.setPaging(Paging.of(result.getDocuments()));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.security.impl;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.security.ManageableSecurity;
import com.opengamma.master.security.SecurityDocument;
import com.opengamma.master.security.SecurityHistoryRequest;
import com.opengamma.master.security.SecurityHistoryResult;
import com.opengamma.master.security.SecurityMaster;
import com.opengamma.master.security.SecurityMetaDataRequest;
import com.opengamma.master.security.SecurityMetaDataResult;
import com.opengamma.master.security.SecuritySearchRequest;
import com.opengamma.master.security.SecuritySearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code SecurityMaster}.
* <p>
* This security master does not support versioning of securities.
*/
public class InMemorySecurityMaster implements SecurityMaster {
// TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSec";
/**
* A cache of securities by identifier.
*/
private final ConcurrentMap<ObjectId, SecurityDocument> _store = new ConcurrentHashMap<ObjectId, SecurityDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySecurityMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySecurityMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
=======
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySecurityMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemorySecurityMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public SecurityMetaDataResult metaData(SecurityMetaDataRequest request) {
ArgumentChecker.notNull(request, "request");
SecurityMetaDataResult result = new SecurityMetaDataResult();
if (request.isSecurityTypes()) {
Set<String> types = new HashSet<String>();
for (SecurityDocument doc : _store.values()) {
types.add(doc.getSecurity().getSecurityType());
}
result.getSecurityTypes().addAll(types);
}
return result;
}
//-------------------------------------------------------------------------
@Override
public SecuritySearchResult search(final SecuritySearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<SecurityDocument> list = new ArrayList<SecurityDocument>();
for (SecurityDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
SecuritySearchResult result = new SecuritySearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final UniqueId uniqueId) {
return get(uniqueId, VersionCorrection.LATEST);
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument get(final ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final SecurityDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Security not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument add(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableSecurity security = document.getSecurity();
security.setUniqueId(uniqueId);
final Instant now = Instant.now();
final SecurityDocument doc = new SecurityDocument(security);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument update(final SecurityDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSecurity(), "document.security");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final SecurityDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(final UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public SecurityDocument correct(final SecurityDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public SecurityHistoryResult history(final SecurityHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final SecurityHistoryResult result = new SecurityHistoryResult();
final SecurityDocument doc = get(request.getObjectId(), VersionCorrection.LATEST);
if (doc != null) {
result.getDocuments().add(doc);
}
result.setPaging(Paging.of(result.getDocuments()));
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
Diff Result
No diff
Case 16 - ogplatform.rev_412e2_f4b6f.InMemorySnapshotMaster.java
public InMemorySnapshotMaster(final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified body: type ObjectIdentifierSupplier → ObjectIdSupplier
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemorySnapshotMaster(final Supplier objectIdSupplier)
Left modified body: type BasicMasterChangeManager → BasicChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict
Safe reported conflict
MergeMethods merged signature and body
KeepBothMethods kept both versions
public InMemorySnapshotMaster(final Supplier objectIdSupplier, final MasterChangeManager changeManager)
Left modified signature: type MasterChangeManager → ChangeManager
Right modified signature: type ObjectIdentifier → ObjectId
Unstructured reported conflict only on signature
Safe reported conflict of whole constructor
MergeMethods reported conflict of whole constructor
KeepBothMethods kept both versions
void remove
Left modified body
Right modified signature: type UniqueIdentifier → UniqueId
Unstructured merged changes: new signature and new body
Safe kept both versions
MergeMethods merged changes: new signature and new body
KeepBothMethods kept both versions
Base
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
* Keyed by object identifier; only the latest version is kept.
*/
private final ConcurrentMap<ObjectIdentifier, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectIdentifier, MarketDataSnapshotDocument>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectIdentifier> _objectIdSupplier;
/**
* The change manager.
*/
private final MasterChangeManager _changeManager;
/**
* Creates an instance.
* Uses a fresh identifier supplier with the default {@code MemSnap} scheme
* and the default change manager.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
* Argument validation is deferred to the two-argument constructor.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
* A default change manager is created; validation is deferred to the
* two-argument constructor.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
* This is the designated constructor: the other constructors delegate here.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectIdentifier objectId = _objectIdSupplier.get();
final UniqueIdentifier uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueIdentifier uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.

  /**
   * The default scheme used for each {@link ObjectIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSnap";

  /**
   * A cache of snapshots by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectIdentifier, MarketDataSnapshotDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final MasterChangeManager _changeManager;

  /**
   * Creates an instance using the default object identifier scheme.
   */
  public InMemorySnapshotMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicMasterChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final MasterChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
    for (MarketDataSnapshotDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    final MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
    // A stale version identifier must not match the (unversioned) stored document.
    if (document == null || !document.getUniqueId().equals(uniqueId)) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // This master does not support versioning, so versionCorrection is validated but ignored.
    final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Snapshot not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
    snapshot.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // Compare-and-swap: fails if another thread replaced the document since the get above.
    if (!_store.replace(uniqueId.getObjectId(), storedDocument, document)) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      // Fixed copy-paste error: this is the snapshot master, not the security master.
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    _changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
    // No version history is kept, so a correction is equivalent to an update.
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    // Only the latest version exists, so the history is at most one document.
    final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
    final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
    final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(list);
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MasterChangeManager changeManager() {
    return _changeManager;
  }

}
Left
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.

  /**
   * The default scheme used for each {@link ObjectIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSnap";

  /**
   * A cache of snapshots by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectIdentifier, MarketDataSnapshotDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance using the default object identifier scheme.
   */
  public InMemorySnapshotMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final ChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
    for (MarketDataSnapshotDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    final MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
    // A stale version identifier must not match the (unversioned) stored document.
    if (document == null || !document.getUniqueId().equals(uniqueId)) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // This master does not support versioning, so versionCorrection is validated but ignored.
    final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Snapshot not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
    snapshot.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // Compare-and-swap: fails if another thread replaced the document since the get above.
    if (!_store.replace(uniqueId.getObjectId(), storedDocument, document)) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      // Fixed copy-paste error: this is the snapshot master, not the security master.
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
    // No version history is kept, so a correction is equivalent to an update.
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    // Only the latest version exists, so the history is at most one document.
    final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
    final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
    final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(list);
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }

}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectIdentifier;
import com.opengamma.id.ObjectIdentifierSupplier;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.

  /**
   * The default scheme used for each {@link ObjectIdentifier}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSnap";

  /**
   * A cache of snapshots by identifier.
   */
  private final ConcurrentMap<ObjectIdentifier, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectIdentifier, MarketDataSnapshotDocument>();
  /**
   * The supplier of identifiers.
   */
  private final Supplier<ObjectIdentifier> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final ChangeManager _changeManager;

  /**
   * Creates an instance using the default object identifier scheme.
   */
  public InMemorySnapshotMaster() {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final ChangeManager changeManager) {
    this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
    this(objectIdSupplier, new BasicChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
    for (MarketDataSnapshotDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    final MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
    // A stale version identifier must not match the (unversioned) stored document.
    if (document == null || !document.getUniqueId().equals(uniqueId)) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // This master does not support versioning, so versionCorrection is validated but ignored.
    final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Snapshot not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final ObjectIdentifier objectId = _objectIdSupplier.get();
    final UniqueIdentifier uniqueId = objectId.atVersion("");
    final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
    snapshot.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final UniqueIdentifier uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // Compare-and-swap: fails if another thread replaced the document since the get above.
    if (!_store.replace(uniqueId.getObjectId(), storedDocument, document)) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(UniqueIdentifier uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      // Fixed copy-paste error: this is the snapshot master, not the security master.
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    _changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
    // No version history is kept, so a correction is equivalent to an update.
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    // Only the latest version exists, so the history is at most one document.
    final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
    final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
    final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(list);
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public ChangeManager changeManager() {
    return _changeManager;
  }

}
Right
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.

  /**
   * The default scheme used for each {@link ObjectId}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSnap";

  /**
   * A cache of snapshot documents keyed by object identifier.
   */
  private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
  /**
   * The supplier of object identifiers.
   */
  private final Supplier<ObjectId> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final MasterChangeManager _changeManager;

  /**
   * Creates an instance using the default identifier scheme and a basic change manager.
   */
  public InMemorySnapshotMaster() {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
    this(objectIdSupplier, new BasicMasterChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
    for (MarketDataSnapshotDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    final MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
    if (document == null || !document.getUniqueId().equals(uniqueId)) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // versions are not supported, so the version-correction is validated but otherwise ignored
    final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Snapshot not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final ObjectId objectId = _objectIdSupplier.get();
    final UniqueId uniqueId = objectId.atVersion("");
    final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
    snapshot.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final UniqueId uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // atomic replace guards against a concurrent update of the same document
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      // message fixed: this is a snapshot master, not a security master
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    _changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
    // corrections are treated identically to updates as versioning is unsupported
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    // only the single current version can ever be returned
    final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
    final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
    final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(list);
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MasterChangeManager changeManager() {
    return _changeManager;
  }

}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.listener.BasicMasterChangeManager;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
  // TODO: This is not hardened for production, as the data in the master can
  // be altered from outside as it is the same object.

  /**
   * The default scheme used for each {@link ObjectId}.
   */
  public static final String DEFAULT_OID_SCHEME = "MemSnap";

  /**
   * A cache of snapshot documents keyed by object identifier.
   */
  private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
  /**
   * The supplier of object identifiers.
   */
  private final Supplier<ObjectId> _objectIdSupplier;
  /**
   * The change manager.
   */
  private final MasterChangeManager _changeManager;

  /**
   * Creates an instance using the default identifier scheme and a basic change manager.
   */
  public InMemorySnapshotMaster() {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
  }

  /**
   * Creates an instance specifying the change manager.
   *
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
    this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
  }

  /**
   * Creates an instance specifying the supplier of object identifiers.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
    this(objectIdSupplier, new BasicMasterChangeManager());
  }

  /**
   * Creates an instance specifying the supplier of object identifiers and change manager.
   *
   * @param objectIdSupplier  the supplier of object identifiers, not null
   * @param changeManager  the change manager, not null
   */
  public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
    ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
    ArgumentChecker.notNull(changeManager, "changeManager");
    _objectIdSupplier = objectIdSupplier;
    _changeManager = changeManager;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
    ArgumentChecker.notNull(request, "request");
    final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
    for (MarketDataSnapshotDocument doc : _store.values()) {
      if (request.matches(doc)) {
        list.add(doc);
      }
    }
    final MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(request.getPagingRequest().select(list));
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
    if (document == null || !document.getUniqueId().equals(uniqueId)) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
    ArgumentChecker.notNull(objectId, "objectId");
    ArgumentChecker.notNull(versionCorrection, "versionCorrection");
    // versions are not supported, so the version-correction is validated but otherwise ignored
    final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
    if (document == null) {
      throw new DataNotFoundException("Snapshot not found: " + objectId);
    }
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final ObjectId objectId = _objectIdSupplier.get();
    final UniqueId uniqueId = objectId.atVersion("");
    final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
    snapshot.setUniqueId(uniqueId);
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
    doc.setVersionFromInstant(now);
    doc.setCorrectionFromInstant(now);
    _store.put(objectId, doc);
    _changeManager.masterChanged(MasterChangedType.ADDED, null, uniqueId, now);
    return doc;
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
    ArgumentChecker.notNull(document, "document");
    ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
    ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
    final UniqueId uniqueId = document.getUniqueId();
    final Instant now = Instant.now();
    final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
    if (storedDocument == null) {
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    document.setVersionFromInstant(now);
    document.setVersionToInstant(null);
    document.setCorrectionFromInstant(now);
    document.setCorrectionToInstant(null);
    // atomic replace guards against a concurrent update of the same document
    if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
      throw new IllegalArgumentException("Concurrent modification");
    }
    _changeManager.masterChanged(MasterChangedType.UPDATED, uniqueId, document.getUniqueId(), now);
    return document;
  }

  //-------------------------------------------------------------------------
  @Override
  public void remove(UniqueId uniqueId) {
    ArgumentChecker.notNull(uniqueId, "uniqueId");
    if (_store.remove(uniqueId.getObjectId()) == null) {
      // message fixed: this is a snapshot master, not a security master
      throw new DataNotFoundException("Snapshot not found: " + uniqueId);
    }
    _changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
    // corrections are treated identically to updates as versioning is unsupported
    return update(document);
  }

  //-------------------------------------------------------------------------
  @Override
  public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
    ArgumentChecker.notNull(request, "request");
    ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
    // only the single current version can ever be returned
    final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
    final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
    final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
    result.setPaging(Paging.of(request.getPagingRequest(), list));
    result.getDocuments().addAll(list);
    return result;
  }

  //-------------------------------------------------------------------------
  @Override
  public MasterChangeManager changeManager() {
    return _changeManager;
  }

}
MergeMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
KeepBothMethods
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
Safe
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
=======
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectIdentifier}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplied of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
=======
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
>>>>>>> YOURS
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueIdentifier uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.masterChanged(MasterChangedType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
Unstructured
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.master.marketdatasnapshot.impl;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import javax.time.Instant;
import com.google.common.base.Supplier;
import com.opengamma.DataNotFoundException;
import com.opengamma.core.change.BasicChangeManager;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeType;
import com.opengamma.id.ObjectIdentifiable;
import com.opengamma.id.ObjectId;
import com.opengamma.id.ObjectIdSupplier;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.master.marketdatasnapshot.ManageableMarketDataSnapshot;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotDocument;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotHistoryResult;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotMaster;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchRequest;
import com.opengamma.master.marketdatasnapshot.MarketDataSnapshotSearchResult;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.db.Paging;
/**
* A simple, in-memory implementation of {@code MarketDataSnapshotMaster}.
* <p>
* This snapshot master does not support versioning of snapshots.
*/
public class InMemorySnapshotMaster implements MarketDataSnapshotMaster {
//TODO: This is not hardened for production, as the data in the master can
// be altered from outside as it is the same object
/**
* The default scheme used for each {@link ObjectId}.
*/
public static final String DEFAULT_OID_SCHEME = "MemSnap";
/**
* A cache of snapshots by identifier.
*/
private final ConcurrentMap<ObjectId, MarketDataSnapshotDocument> _store = new ConcurrentHashMap<ObjectId, MarketDataSnapshotDocument>();
/**
* The supplier of identifiers.
*/
private final Supplier<ObjectId> _objectIdSupplier;
/**
* The change manager.
*/
private final ChangeManager _changeManager;
/**
* Creates an instance.
*/
public InMemorySnapshotMaster() {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME));
}
/**
* Creates an instance specifying the change manager.
*
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final ChangeManager changeManager) {
this(new ObjectIdentifierSupplier(DEFAULT_OID_SCHEME), changeManager);
=======
public InMemorySnapshotMaster(final MasterChangeManager changeManager) {
this(new ObjectIdSupplier(DEFAULT_OID_SCHEME), changeManager);
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers.
*
* @param objectIdSupplier the supplier of object identifiers, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier) {
this(objectIdSupplier, new BasicChangeManager());
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier) {
this(objectIdSupplier, new BasicMasterChangeManager());
>>>>>>> YOURS
}
/**
* Creates an instance specifying the supplier of object identifiers and change manager.
*
* @param objectIdSupplier the supplier of object identifiers, not null
* @param changeManager the change manager, not null
*/
<<<<<<< MINE
public InMemorySnapshotMaster(final Supplier<ObjectIdentifier> objectIdSupplier, final ChangeManager changeManager) {
=======
public InMemorySnapshotMaster(final Supplier<ObjectId> objectIdSupplier, final MasterChangeManager changeManager) {
>>>>>>> YOURS
ArgumentChecker.notNull(objectIdSupplier, "objectIdSupplier");
ArgumentChecker.notNull(changeManager, "changeManager");
_objectIdSupplier = objectIdSupplier;
_changeManager = changeManager;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotSearchResult search(MarketDataSnapshotSearchRequest request) {
ArgumentChecker.notNull(request, "request");
final List<MarketDataSnapshotDocument> list = new ArrayList<MarketDataSnapshotDocument>();
for (MarketDataSnapshotDocument doc : _store.values()) {
if (request.matches(doc)) {
list.add(doc);
}
}
MarketDataSnapshotSearchResult result = new MarketDataSnapshotSearchResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(request.getPagingRequest().select(list));
return result;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
final MarketDataSnapshotDocument document = _store.get(uniqueId.getObjectId());
if (document == null || !document.getUniqueId().equals(uniqueId)) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument get(ObjectIdentifiable objectId, VersionCorrection versionCorrection) {
ArgumentChecker.notNull(objectId, "objectId");
ArgumentChecker.notNull(versionCorrection, "versionCorrection");
final MarketDataSnapshotDocument document = _store.get(objectId.getObjectId());
if (document == null) {
throw new DataNotFoundException("Snapshot not found: " + objectId);
}
return document;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument add(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final ObjectId objectId = _objectIdSupplier.get();
final UniqueId uniqueId = objectId.atVersion("");
final ManageableMarketDataSnapshot snapshot = document.getSnapshot();
snapshot.setUniqueId(uniqueId);
final Instant now = Instant.now();
final MarketDataSnapshotDocument doc = new MarketDataSnapshotDocument(snapshot);
doc.setVersionFromInstant(now);
doc.setCorrectionFromInstant(now);
_store.put(objectId, doc);
_changeManager.entityChanged(ChangeType.ADDED, null, uniqueId, now);
return doc;
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument update(MarketDataSnapshotDocument document) {
ArgumentChecker.notNull(document, "document");
ArgumentChecker.notNull(document.getUniqueId(), "document.uniqueId");
ArgumentChecker.notNull(document.getSnapshot(), "document.snapshot");
final UniqueId uniqueId = document.getUniqueId();
final Instant now = Instant.now();
final MarketDataSnapshotDocument storedDocument = _store.get(uniqueId.getObjectId());
if (storedDocument == null) {
throw new DataNotFoundException("Snapshot not found: " + uniqueId);
}
document.setVersionFromInstant(now);
document.setVersionToInstant(null);
document.setCorrectionFromInstant(now);
document.setCorrectionToInstant(null);
if (_store.replace(uniqueId.getObjectId(), storedDocument, document) == false) {
throw new IllegalArgumentException("Concurrent modification");
}
_changeManager.entityChanged(ChangeType.UPDATED, uniqueId, document.getUniqueId(), now);
return document;
}
//-------------------------------------------------------------------------
@Override
public void remove(UniqueId uniqueId) {
ArgumentChecker.notNull(uniqueId, "uniqueId");
if (_store.remove(uniqueId.getObjectId()) == null) {
throw new DataNotFoundException("Security not found: " + uniqueId);
}
_changeManager.entityChanged(ChangeType.REMOVED, uniqueId, null, Instant.now());
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotDocument correct(MarketDataSnapshotDocument document) {
return update(document);
}
//-------------------------------------------------------------------------
@Override
public MarketDataSnapshotHistoryResult history(MarketDataSnapshotHistoryRequest request) {
ArgumentChecker.notNull(request, "request");
ArgumentChecker.notNull(request.getObjectId(), "request.objectId");
final MarketDataSnapshotDocument doc = _store.get(request.getObjectId());
final List<MarketDataSnapshotDocument> list = (doc != null) ? Collections.singletonList(doc) : Collections.<MarketDataSnapshotDocument>emptyList();
final MarketDataSnapshotHistoryResult result = new MarketDataSnapshotHistoryResult();
result.setPaging(Paging.of(request.getPagingRequest(), list));
result.getDocuments().addAll(list);
return result;
}
//-------------------------------------------------------------------------
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
Diff Result
No diff
Case 17 - ogplatform.rev_412e2_f4b6f.ViewProcessorManagerTest.java
void masterChanged
Left renamed masterChanged to entityChanged and changed a signature parameter type: MasterChangedType → ChangeType
Right modified signature parameter types: UniqueIdentifier → UniqueId
Unstructured reported conflict between signatures
Safe reported conflict between signatures
MergeMethods reported conflict between signatures
KeepBothMethods kept both versions of method
Base
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.master.listener.MasterChangeListener;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChanged;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.listener.NotifyingMaster;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueIdentifier.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueIdentifier getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueIdentifier viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueIdentifier clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements MasterChangeManager {
private MasterChangeListener _listener;
@Override
public void addChangeListener(MasterChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(MasterChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
@Override
public void masterChanged(MasterChangedType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
public void notifyListenerUnwatchedIdentifier() {
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
}
public void notifyListenerWatchedIdentifier() {
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements NotifyingMaster {
private MasterChangeManager _changeManager = new MockChangeManager();
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
  final ViewProcessorManager vpm = new ViewProcessorManager();
  final MockViewProcessor vp = new MockViewProcessor();
  vpm.setViewProcessor(vp);
  final MockNotifyingMaster master = new MockNotifyingMaster();
  // Renamed local from the misspelled "changeManger".
  final MockChangeManager changeManager = (MockChangeManager) master.changeManager();
  final MockVersionedSource source = new MockVersionedSource();
  vpm.setMasterAndSource(master, source);
  // Check normal startup: the manager registers a change listener and starts the processor.
  vpm.start();
  assertTrue(changeManager.hasListener());
  assertTrue(vpm.isRunning());
  assertTrue(vp.isRunning());
  Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
  assertNotNull(initialId);
  VersionCorrection initialVersion = source.getVersionCorrection();
  // Notify it of a change to the master: an unwatched identifier must be ignored,
  // a watched one must suspend the processor and advance the version/correction.
  Thread.sleep(10);
  changeManager.notifyListenerUnwatchedIdentifier();
  assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
  changeManager.notifyListenerWatchedIdentifier();
  assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
  VersionCorrection newVersion = source.getVersionCorrection();
  assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
  Long newId = 0L;
  for (int i = 0; i < 10; i++) {
    // Poll repeatedly to allow the asynchronous function re-initialisation to complete.
    Thread.sleep(Timeout.standardTimeoutMillis() / 10);
    newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
  }
  assertTrue(newId > initialId);
  assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
  // Shutdown: processor stopped and listener deregistered.
  vpm.stop();
  assertFalse(vpm.isRunning());
  assertFalse(vp.isRunning());
  assertFalse(changeManager.hasListener());
}
}
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.master.listener.MasterChangeListener;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChanged;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.listener.NotifyingMaster;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {

  //-------------------------------------------------------------------------
  /**
   * Mock view processor recording start/stop/suspend transitions so the test can
   * observe how {@code ViewProcessorManager} drives its lifecycle. Suspension
   * transitions are published to a blocking queue for timed polling.
   */
  private static class MockViewProcessor implements ViewProcessorInternal {

    private final CompiledFunctionService _compiledFunctionService;
    private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
    private boolean _running;
    private boolean _suspended;

    public MockViewProcessor() {
      final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
      _compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
      functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
        @Override
        public void init(final FunctionCompilationContext context) {
          // Register interest in the "Watched" identifier so changes to it trigger re-initialisation.
          context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueIdentifier.of("Test", "Watched"));
        }
      });
    }

    /**
     * Returns a future whose submitted task flags the processor as suspended and
     * whose result, when run, resumes it. Both transitions assert the expected
     * prior state and record the new state on the queue.
     */
    @Override
    public Future<Runnable> suspend(final ExecutorService executorService) {
      return executorService.submit(new Runnable() {
        @Override
        public void run() {
          synchronized (MockViewProcessor.this) {
            assertTrue(_running);
            assertFalse(_suspended);
            _suspended = true;
            _suspendState.add(Boolean.TRUE);
          }
        }
      }, (Runnable) new Runnable() {
        @Override
        public void run() {
          synchronized (MockViewProcessor.this) {
            assertTrue(_running);
            assertTrue(_suspended);
            _suspended = false;
            _suspendState.add(Boolean.FALSE);
          }
        }
      });
    }

    @Override
    public synchronized boolean isRunning() {
      return _running;
    }

    @Override
    public synchronized void start() {
      assertFalse(_running);
      _running = true;
    }

    @Override
    public synchronized void stop() {
      assertTrue(_running);
      _running = false;
    }

    /** Polls the suspend-state queue; null if no transition occurred within the timeout. */
    public Boolean isSuspended(final long timeout) throws InterruptedException {
      return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
    }

    @Override
    public UniqueIdentifier getUniqueId() {
      return null;
    }

    @Override
    public ViewDefinitionRepository getViewDefinitionRepository() {
      return null;
    }

    @Override
    public Collection<? extends ViewProcess> getViewProcesses() {
      return null;
    }

    @Override
    public ViewProcess getViewProcess(UniqueIdentifier viewProcessId) {
      return null;
    }

    @Override
    public Collection<ViewClient> getViewClients() {
      return null;
    }

    @Override
    public ViewClient createViewClient(UserPrincipal clientUser) {
      return null;
    }

    @Override
    public ViewClient getViewClient(UniqueIdentifier clientId) {
      return null;
    }

    @Override
    public CompiledFunctionService getFunctionCompilationService() {
      return _compiledFunctionService;
    }

    @Override
    public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
      return null;
    }

    @Override
    public EngineResourceManager<ViewCycle> getViewCycleManager() {
      return null;
    }
  }

  //-------------------------------------------------------------------------
  /** Change manager stub holding at most one listener; can fire synthetic events. */
  private static final class MockChangeManager implements MasterChangeManager {

    private MasterChangeListener _listener;

    @Override
    public void addChangeListener(MasterChangeListener listener) {
      assertNull(_listener);
      _listener = listener;
    }

    @Override
    public void removeChangeListener(MasterChangeListener listener) {
      assertEquals(listener, _listener);
      _listener = null;
    }

    public boolean hasListener() {
      return _listener != null;
    }

    @Override
    public void masterChanged(MasterChangedType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
      // No-op: events are delivered directly through the notifyListener* helpers below.
    }

    public void notifyListenerUnwatchedIdentifier() {
      _listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
    }

    public void notifyListenerWatchedIdentifier() {
      _listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
    }
  }

  //-------------------------------------------------------------------------
  /** Master stub exposing the mock change manager. */
  private static class MockNotifyingMaster implements NotifyingMaster {

    private MasterChangeManager _changeManager = new MockChangeManager();

    @Override
    public MasterChangeManager changeManager() {
      return _changeManager;
    }
  }

  //-------------------------------------------------------------------------
  /** Versioned source stub recording each version/correction it is given. */
  private static class MockVersionedSource implements VersionedSource {

    private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();

    @Override
    public void setVersionCorrection(VersionCorrection versionCorrection) {
      _versionCorrections.add(versionCorrection);
    }

    public VersionCorrection getVersionCorrection() throws InterruptedException {
      return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
    }
  }

  //-------------------------------------------------------------------------
  /**
   * Starts the manager, fires unwatched and watched change notifications, and
   * verifies that only the watched one suspends the processor, advances the
   * version/correction and re-initialises the functions; then shuts down cleanly.
   */
  @Test
  public void testBasicOperation() throws InterruptedException {
    final ViewProcessorManager vpm = new ViewProcessorManager();
    final MockViewProcessor vp = new MockViewProcessor();
    vpm.setViewProcessor(vp);
    final MockNotifyingMaster master = new MockNotifyingMaster();
    // Renamed local from the misspelled "changeManger".
    final MockChangeManager changeManager = (MockChangeManager) master.changeManager();
    final MockVersionedSource source = new MockVersionedSource();
    vpm.setMasterAndSource(master, source);
    // Check normal startup
    vpm.start();
    assertTrue(changeManager.hasListener());
    assertTrue(vpm.isRunning());
    assertTrue(vp.isRunning());
    Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
    assertNotNull(initialId);
    VersionCorrection initialVersion = source.getVersionCorrection();
    // Notify it of a change to the master
    Thread.sleep(10);
    changeManager.notifyListenerUnwatchedIdentifier();
    assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
    changeManager.notifyListenerWatchedIdentifier();
    assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
    VersionCorrection newVersion = source.getVersionCorrection();
    assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
    Long newId = 0L;
    for (int i = 0; i < 10; i++) {
      // Poll repeatedly to allow the asynchronous re-initialisation to complete.
      Thread.sleep(Timeout.standardTimeoutMillis() / 10);
      newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
    }
    assertTrue(newId > initialId);
    assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
    // Shutdown
    vpm.stop();
    assertFalse(vpm.isRunning());
    assertFalse(vp.isRunning());
    assertFalse(changeManager.hasListener());
  }
}
Left
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {

  //-------------------------------------------------------------------------
  /**
   * Mock view processor recording start/stop/suspend transitions so the test can
   * observe how {@code ViewProcessorManager} drives its lifecycle. Suspension
   * transitions are published to a blocking queue for timed polling.
   */
  private static class MockViewProcessor implements ViewProcessorInternal {

    private final CompiledFunctionService _compiledFunctionService;
    private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
    private boolean _running;
    private boolean _suspended;

    public MockViewProcessor() {
      final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
      _compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
      functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
        @Override
        public void init(final FunctionCompilationContext context) {
          // Register interest in the "Watched" identifier so changes to it trigger re-initialisation.
          context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueIdentifier.of("Test", "Watched"));
        }
      });
    }

    /**
     * Returns a future whose submitted task flags the processor as suspended and
     * whose result, when run, resumes it. Both transitions assert the expected
     * prior state and record the new state on the queue.
     */
    @Override
    public Future<Runnable> suspend(final ExecutorService executorService) {
      return executorService.submit(new Runnable() {
        @Override
        public void run() {
          synchronized (MockViewProcessor.this) {
            assertTrue(_running);
            assertFalse(_suspended);
            _suspended = true;
            _suspendState.add(Boolean.TRUE);
          }
        }
      }, (Runnable) new Runnable() {
        @Override
        public void run() {
          synchronized (MockViewProcessor.this) {
            assertTrue(_running);
            assertTrue(_suspended);
            _suspended = false;
            _suspendState.add(Boolean.FALSE);
          }
        }
      });
    }

    @Override
    public synchronized boolean isRunning() {
      return _running;
    }

    @Override
    public synchronized void start() {
      assertFalse(_running);
      _running = true;
    }

    @Override
    public synchronized void stop() {
      assertTrue(_running);
      _running = false;
    }

    /** Polls the suspend-state queue; null if no transition occurred within the timeout. */
    public Boolean isSuspended(final long timeout) throws InterruptedException {
      return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
    }

    @Override
    public UniqueIdentifier getUniqueId() {
      return null;
    }

    @Override
    public ViewDefinitionRepository getViewDefinitionRepository() {
      return null;
    }

    @Override
    public Collection<? extends ViewProcess> getViewProcesses() {
      return null;
    }

    @Override
    public ViewProcess getViewProcess(UniqueIdentifier viewProcessId) {
      return null;
    }

    @Override
    public Collection<ViewClient> getViewClients() {
      return null;
    }

    @Override
    public ViewClient createViewClient(UserPrincipal clientUser) {
      return null;
    }

    @Override
    public ViewClient getViewClient(UniqueIdentifier clientId) {
      return null;
    }

    @Override
    public CompiledFunctionService getFunctionCompilationService() {
      return _compiledFunctionService;
    }

    @Override
    public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
      return null;
    }

    @Override
    public EngineResourceManager<ViewCycle> getViewCycleManager() {
      return null;
    }
  }

  //-------------------------------------------------------------------------
  /** Change manager stub holding at most one listener; can fire synthetic events. */
  private static final class MockChangeManager implements ChangeManager {

    private ChangeListener _listener;

    @Override
    public void addChangeListener(ChangeListener listener) {
      assertNull(_listener);
      _listener = listener;
    }

    @Override
    public void removeChangeListener(ChangeListener listener) {
      assertEquals(listener, _listener);
      _listener = null;
    }

    public boolean hasListener() {
      return _listener != null;
    }

    @Override
    public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
      // No-op: events are delivered directly through the notifyListener* helpers below.
    }

    public void notifyListenerUnwatchedIdentifier() {
      _listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
    }

    public void notifyListenerWatchedIdentifier() {
      _listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
    }
  }

  //-------------------------------------------------------------------------
  /** Change provider stub exposing the mock change manager. */
  private static class MockNotifyingMaster implements ChangeProvider {

    private ChangeManager _changeManager = new MockChangeManager();

    @Override
    public ChangeManager changeManager() {
      return _changeManager;
    }
  }

  //-------------------------------------------------------------------------
  /** Versioned source stub recording each version/correction it is given. */
  private static class MockVersionedSource implements VersionedSource {

    private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();

    @Override
    public void setVersionCorrection(VersionCorrection versionCorrection) {
      _versionCorrections.add(versionCorrection);
    }

    public VersionCorrection getVersionCorrection() throws InterruptedException {
      return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
    }
  }

  //-------------------------------------------------------------------------
  /**
   * Starts the manager, fires unwatched and watched change notifications, and
   * verifies that only the watched one suspends the processor, advances the
   * version/correction and re-initialises the functions; then shuts down cleanly.
   */
  @Test
  public void testBasicOperation() throws InterruptedException {
    final ViewProcessorManager vpm = new ViewProcessorManager();
    final MockViewProcessor vp = new MockViewProcessor();
    vpm.setViewProcessor(vp);
    final MockNotifyingMaster master = new MockNotifyingMaster();
    // Renamed local from the misspelled "changeManger".
    final MockChangeManager changeManager = (MockChangeManager) master.changeManager();
    final MockVersionedSource source = new MockVersionedSource();
    vpm.setMasterAndSource(master, source);
    // Check normal startup
    vpm.start();
    assertTrue(changeManager.hasListener());
    assertTrue(vpm.isRunning());
    assertTrue(vp.isRunning());
    Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
    assertNotNull(initialId);
    VersionCorrection initialVersion = source.getVersionCorrection();
    // Notify it of a change to the master
    Thread.sleep(10);
    changeManager.notifyListenerUnwatchedIdentifier();
    assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
    changeManager.notifyListenerWatchedIdentifier();
    assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
    VersionCorrection newVersion = source.getVersionCorrection();
    assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
    Long newId = 0L;
    for (int i = 0; i < 10; i++) {
      // Poll repeatedly to allow the asynchronous re-initialisation to complete.
      Thread.sleep(Timeout.standardTimeoutMillis() / 10);
      newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
    }
    assertTrue(newId > initialId);
    assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
    // Shutdown
    vpm.stop();
    assertFalse(vpm.isRunning());
    assertFalse(vp.isRunning());
    assertFalse(changeManager.hasListener());
  }
}
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueIdentifier;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {

  //-------------------------------------------------------------------------
  /**
   * Mock view processor recording start/stop/suspend transitions so the test can
   * observe how {@code ViewProcessorManager} drives its lifecycle. Suspension
   * transitions are published to a blocking queue for timed polling.
   */
  private static class MockViewProcessor implements ViewProcessorInternal {

    private final CompiledFunctionService _compiledFunctionService;
    private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
    private boolean _running;
    private boolean _suspended;

    public MockViewProcessor() {
      final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
      _compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
      functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
        @Override
        public void init(final FunctionCompilationContext context) {
          // Register interest in the "Watched" identifier so changes to it trigger re-initialisation.
          context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueIdentifier.of("Test", "Watched"));
        }
      });
    }

    /**
     * Returns a future whose submitted task flags the processor as suspended and
     * whose result, when run, resumes it. Both transitions assert the expected
     * prior state and record the new state on the queue.
     */
    @Override
    public Future<Runnable> suspend(final ExecutorService executorService) {
      return executorService.submit(new Runnable() {
        @Override
        public void run() {
          synchronized (MockViewProcessor.this) {
            assertTrue(_running);
            assertFalse(_suspended);
            _suspended = true;
            _suspendState.add(Boolean.TRUE);
          }
        }
      }, (Runnable) new Runnable() {
        @Override
        public void run() {
          synchronized (MockViewProcessor.this) {
            assertTrue(_running);
            assertTrue(_suspended);
            _suspended = false;
            _suspendState.add(Boolean.FALSE);
          }
        }
      });
    }

    @Override
    public synchronized boolean isRunning() {
      return _running;
    }

    @Override
    public synchronized void start() {
      assertFalse(_running);
      _running = true;
    }

    @Override
    public synchronized void stop() {
      assertTrue(_running);
      _running = false;
    }

    /** Polls the suspend-state queue; null if no transition occurred within the timeout. */
    public Boolean isSuspended(final long timeout) throws InterruptedException {
      return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
    }

    @Override
    public UniqueIdentifier getUniqueId() {
      return null;
    }

    @Override
    public ViewDefinitionRepository getViewDefinitionRepository() {
      return null;
    }

    @Override
    public Collection<? extends ViewProcess> getViewProcesses() {
      return null;
    }

    @Override
    public ViewProcess getViewProcess(UniqueIdentifier viewProcessId) {
      return null;
    }

    @Override
    public Collection<ViewClient> getViewClients() {
      return null;
    }

    @Override
    public ViewClient createViewClient(UserPrincipal clientUser) {
      return null;
    }

    @Override
    public ViewClient getViewClient(UniqueIdentifier clientId) {
      return null;
    }

    @Override
    public CompiledFunctionService getFunctionCompilationService() {
      return _compiledFunctionService;
    }

    @Override
    public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
      return null;
    }

    @Override
    public EngineResourceManager<ViewCycle> getViewCycleManager() {
      return null;
    }
  }

  //-------------------------------------------------------------------------
  /** Change manager stub holding at most one listener; can fire synthetic events. */
  private static final class MockChangeManager implements ChangeManager {

    private ChangeListener _listener;

    @Override
    public void addChangeListener(ChangeListener listener) {
      assertNull(_listener);
      _listener = listener;
    }

    @Override
    public void removeChangeListener(ChangeListener listener) {
      assertEquals(listener, _listener);
      _listener = null;
    }

    public boolean hasListener() {
      return _listener != null;
    }

    @Override
    public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
      // No-op: events are delivered directly through the notifyListener* helpers below.
    }

    public void notifyListenerUnwatchedIdentifier() {
      _listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
    }

    public void notifyListenerWatchedIdentifier() {
      _listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
    }
  }

  //-------------------------------------------------------------------------
  /** Change provider stub exposing the mock change manager. */
  private static class MockNotifyingMaster implements ChangeProvider {

    private ChangeManager _changeManager = new MockChangeManager();

    @Override
    public ChangeManager changeManager() {
      return _changeManager;
    }
  }

  //-------------------------------------------------------------------------
  /** Versioned source stub recording each version/correction it is given. */
  private static class MockVersionedSource implements VersionedSource {

    private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();

    @Override
    public void setVersionCorrection(VersionCorrection versionCorrection) {
      _versionCorrections.add(versionCorrection);
    }

    public VersionCorrection getVersionCorrection() throws InterruptedException {
      return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
    }
  }

  //-------------------------------------------------------------------------
  /**
   * Starts the manager, fires unwatched and watched change notifications, and
   * verifies that only the watched one suspends the processor, advances the
   * version/correction and re-initialises the functions; then shuts down cleanly.
   */
  @Test
  public void testBasicOperation() throws InterruptedException {
    final ViewProcessorManager vpm = new ViewProcessorManager();
    final MockViewProcessor vp = new MockViewProcessor();
    vpm.setViewProcessor(vp);
    final MockNotifyingMaster master = new MockNotifyingMaster();
    // Renamed local from the misspelled "changeManger".
    final MockChangeManager changeManager = (MockChangeManager) master.changeManager();
    final MockVersionedSource source = new MockVersionedSource();
    vpm.setMasterAndSource(master, source);
    // Check normal startup
    vpm.start();
    assertTrue(changeManager.hasListener());
    assertTrue(vpm.isRunning());
    assertTrue(vp.isRunning());
    Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
    assertNotNull(initialId);
    VersionCorrection initialVersion = source.getVersionCorrection();
    // Notify it of a change to the master
    Thread.sleep(10);
    changeManager.notifyListenerUnwatchedIdentifier();
    assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
    changeManager.notifyListenerWatchedIdentifier();
    assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
    VersionCorrection newVersion = source.getVersionCorrection();
    assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
    Long newId = 0L;
    for (int i = 0; i < 10; i++) {
      // Poll repeatedly to allow the asynchronous re-initialisation to complete.
      Thread.sleep(Timeout.standardTimeoutMillis() / 10);
      newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
    }
    assertTrue(newId > initialId);
    assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
    // Shutdown
    vpm.stop();
    assertFalse(vpm.isRunning());
    assertFalse(vp.isRunning());
    assertFalse(changeManager.hasListener());
  }
}
Right
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.master.listener.MasterChangeListener;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChanged;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.listener.NotifyingMaster;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
/**
 * Mock view processor recording start/stop/suspend transitions so the test can
 * observe how the manager drives its lifecycle. Suspension transitions are
 * published to {@code _suspendState} for timed polling; unused interface
 * methods return null.
 */
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
// Records each suspend (TRUE) / resume (FALSE) transition in order.
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
// Register interest in the "Watched" identifier so changes to it trigger re-initialisation.
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
/**
 * Submits a task that flags the processor as suspended; the future's result is
 * a Runnable which, when run, resumes it. Both transitions assert the expected
 * prior state and record the new state on the queue.
 */
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
// Polls the suspend-state queue; null if no transition occurred within the timeout.
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
/** Change manager stub that holds at most one listener and can fire synthetic UPDATED events. */
private static final class MockChangeManager implements MasterChangeManager {

  /** The single registered listener, or null when none is registered. */
  private MasterChangeListener _registeredListener;

  @Override
  public void addChangeListener(MasterChangeListener listener) {
    assertNull(_registeredListener);
    _registeredListener = listener;
  }

  @Override
  public void removeChangeListener(MasterChangeListener listener) {
    assertEquals(listener, _registeredListener);
    _registeredListener = null;
  }

  public boolean hasListener() {
    return _registeredListener != null;
  }

  @Override
  public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
    // No-op: events are injected directly via the notifyListener* helpers.
  }

  /** Delivers an UPDATED event in the "Test" scheme to the registered listener. */
  private void fireUpdate(final String beforeValue, final String afterValue) {
    final MasterChanged event = new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", beforeValue), UniqueId.of("Test", afterValue), Instant.now());
    _registeredListener.masterChanged(event);
  }

  public void notifyListenerUnwatchedIdentifier() {
    fireUpdate("Unwatched", "UnwatchedNew");
  }

  public void notifyListenerWatchedIdentifier() {
    fireUpdate("Watched", "WatchedNew");
  }
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements NotifyingMaster {

  /** Single change manager instance handed to every caller so the test can inspect it. */
  private final MasterChangeManager _manager = new MockChangeManager();

  @Override
  public MasterChangeManager changeManager() {
    return _manager;
  }
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {

  /** Captures each version/correction pushed onto this source, in arrival order. */
  private final LinkedBlockingQueue<VersionCorrection> _received = new LinkedBlockingQueue<VersionCorrection>();

  @Override
  public void setVersionCorrection(VersionCorrection versionCorrection) {
    _received.add(versionCorrection);
  }

  /** Waits up to the standard timeout for the next recorded value; returns null on timeout. */
  public VersionCorrection getVersionCorrection() throws InterruptedException {
    return _received.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
  }
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.master.listener.MasterChangeListener;
import com.opengamma.master.listener.MasterChangeManager;
import com.opengamma.master.listener.MasterChanged;
import com.opengamma.master.listener.MasterChangedType;
import com.opengamma.master.listener.NotifyingMaster;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements MasterChangeManager {
private MasterChangeListener _listener;
@Override
public void addChangeListener(MasterChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(MasterChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
public void notifyListenerUnwatchedIdentifier() {
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
}
public void notifyListenerWatchedIdentifier() {
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements NotifyingMaster {
private MasterChangeManager _changeManager = new MockChangeManager();
@Override
public MasterChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
MergeMethods
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
<<<<<<< MINE
@Override
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
=======
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
>>>>>>> YOURS
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
<<<<<<< MINE
@Override
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
=======
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
>>>>>>> YOURS
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
KeepBothMethods
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
@Override
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
@Override
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
Safe
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
<<<<<<< MINE
@Override
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
=======
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
>>>>>>> YOURS
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
<<<<<<< MINE
@Override
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
}
=======
@Override
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
}
>>>>>>> YOURS
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
Unstructured
/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
@Override
<<<<<<< MINE
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
=======
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
>>>>>>> YOURS
}
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}/**
* Copyright (C) 2009 - 2009 by OpenGamma Inc.
*
* Please see distribution for license.
*/
package com.opengamma.financial.view;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;
import java.util.Collection;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.time.Instant;
import org.testng.annotations.Test;
import com.opengamma.core.change.ChangeEvent;
import com.opengamma.core.change.ChangeListener;
import com.opengamma.core.change.ChangeManager;
import com.opengamma.core.change.ChangeProvider;
import com.opengamma.core.change.ChangeType;
import com.opengamma.engine.ComputationTarget;
import com.opengamma.engine.function.CachingFunctionRepositoryCompiler;
import com.opengamma.engine.function.CompiledFunctionService;
import com.opengamma.engine.function.FunctionCompilationContext;
import com.opengamma.engine.function.InMemoryFunctionRepository;
import com.opengamma.engine.test.MockFunction;
import com.opengamma.engine.view.ViewDefinitionRepository;
import com.opengamma.engine.view.ViewProcess;
import com.opengamma.engine.view.ViewProcessorInternal;
import com.opengamma.engine.view.calc.EngineResourceManager;
import com.opengamma.engine.view.calc.ViewCycle;
import com.opengamma.engine.view.client.ViewClient;
import com.opengamma.engine.view.event.ViewProcessorEventListenerRegistry;
import com.opengamma.id.UniqueId;
import com.opengamma.id.VersionCorrection;
import com.opengamma.livedata.UserPrincipal;
import com.opengamma.master.VersionedSource;
import com.opengamma.util.test.Timeout;
/**
* Test the ViewProcessorManager class.
*/
public class ViewProcessorManagerTest {
//-------------------------------------------------------------------------
private static class MockViewProcessor implements ViewProcessorInternal {
private final CompiledFunctionService _compiledFunctionService;
private final LinkedBlockingQueue<Boolean> _suspendState = new LinkedBlockingQueue<Boolean>();
private boolean _running;
private boolean _suspended;
public MockViewProcessor() {
final InMemoryFunctionRepository functions = new InMemoryFunctionRepository();
_compiledFunctionService = new CompiledFunctionService(functions, new CachingFunctionRepositoryCompiler(), new FunctionCompilationContext());
functions.addFunction(new MockFunction("mock", new ComputationTarget("Foo")) {
@Override
public void init(final FunctionCompilationContext context) {
context.getFunctionReinitializer().reinitializeFunction(getFunctionDefinition(), UniqueId.of("Test", "Watched"));
}
});
}
@Override
public Future<Runnable> suspend(final ExecutorService executorService) {
return executorService.submit(new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertFalse(_suspended);
_suspended = true;
_suspendState.add(Boolean.TRUE);
}
}
}, (Runnable) new Runnable() {
@Override
public void run() {
synchronized (MockViewProcessor.this) {
assertTrue(_running);
assertTrue(_suspended);
_suspended = false;
_suspendState.add(Boolean.FALSE);
}
}
});
}
@Override
public synchronized boolean isRunning() {
return _running;
}
@Override
public synchronized void start() {
assertFalse(_running);
_running = true;
}
@Override
public synchronized void stop() {
assertTrue(_running);
_running = false;
}
public Boolean isSuspended(final long timeout) throws InterruptedException {
return _suspendState.poll(timeout, TimeUnit.MILLISECONDS);
}
@Override
public UniqueId getUniqueId() {
return null;
}
@Override
public ViewDefinitionRepository getViewDefinitionRepository() {
return null;
}
@Override
public Collection<? extends ViewProcess> getViewProcesses() {
return null;
}
@Override
public ViewProcess getViewProcess(UniqueId viewProcessId) {
return null;
}
@Override
public Collection<ViewClient> getViewClients() {
return null;
}
@Override
public ViewClient createViewClient(UserPrincipal clientUser) {
return null;
}
@Override
public ViewClient getViewClient(UniqueId clientId) {
return null;
}
@Override
public CompiledFunctionService getFunctionCompilationService() {
return _compiledFunctionService;
}
@Override
public ViewProcessorEventListenerRegistry getViewProcessorEventListenerRegistry() {
return null;
}
@Override
public EngineResourceManager<ViewCycle> getViewCycleManager() {
return null;
}
}
//-------------------------------------------------------------------------
private static final class MockChangeManager implements ChangeManager {
private ChangeListener _listener;
@Override
public void addChangeListener(ChangeListener listener) {
assertNull(_listener);
_listener = listener;
}
@Override
public void removeChangeListener(ChangeListener listener) {
assertEquals(listener, _listener);
_listener = null;
}
public boolean hasListener() {
return _listener != null;
}
@Override
<<<<<<< MINE
public void entityChanged(ChangeType type, UniqueIdentifier beforeId, UniqueIdentifier afterId, Instant versionInstant) {
=======
public void masterChanged(MasterChangedType type, UniqueId beforeId, UniqueId afterId, Instant versionInstant) {
>>>>>>> YOURS
}
public void notifyListenerUnwatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Unwatched"), UniqueIdentifier.of("Test", "UnwatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Unwatched"), UniqueId.of("Test", "UnwatchedNew"), Instant.now()));
>>>>>>> YOURS
}
public void notifyListenerWatchedIdentifier() {
<<<<<<< MINE
_listener.entityChanged(new ChangeEvent(ChangeType.UPDATED, UniqueIdentifier.of("Test", "Watched"), UniqueIdentifier.of("Test", "WatchedNew"), Instant.now()));
=======
_listener.masterChanged(new MasterChanged(MasterChangedType.UPDATED, UniqueId.of("Test", "Watched"), UniqueId.of("Test", "WatchedNew"), Instant.now()));
>>>>>>> YOURS
}
}
//-------------------------------------------------------------------------
private static class MockNotifyingMaster implements ChangeProvider {
private ChangeManager _changeManager = new MockChangeManager();
@Override
public ChangeManager changeManager() {
return _changeManager;
}
}
//-------------------------------------------------------------------------
private static class MockVersionedSource implements VersionedSource {
private final LinkedBlockingQueue<VersionCorrection> _versionCorrections = new LinkedBlockingQueue<VersionCorrection>();
@Override
public void setVersionCorrection(VersionCorrection versionCorrection) {
_versionCorrections.add(versionCorrection);
}
public VersionCorrection getVersionCorrection() throws InterruptedException {
return _versionCorrections.poll(Timeout.standardTimeoutMillis(), TimeUnit.MILLISECONDS);
}
}
//-------------------------------------------------------------------------
@Test
public void testBasicOperation() throws InterruptedException {
final ViewProcessorManager vpm = new ViewProcessorManager();
final MockViewProcessor vp = new MockViewProcessor();
vpm.setViewProcessor(vp);
final MockNotifyingMaster master = new MockNotifyingMaster();
final MockChangeManager changeManger = (MockChangeManager) master.changeManager();
final MockVersionedSource source = new MockVersionedSource();
vpm.setMasterAndSource(master, source);
// Check normal startup
vpm.start();
assertTrue(changeManger.hasListener());
assertTrue(vpm.isRunning());
assertTrue(vp.isRunning());
Long initialId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
assertNotNull(initialId);
VersionCorrection initialVersion = source.getVersionCorrection();
// Notify it of a change to the master
Thread.sleep(10);
changeManger.notifyListenerUnwatchedIdentifier();
assertNull(vp.isSuspended(Timeout.standardTimeoutMillis()));
changeManger.notifyListenerWatchedIdentifier();
assertEquals(Boolean.TRUE, vp.isSuspended(Timeout.standardTimeoutMillis()));
VersionCorrection newVersion = source.getVersionCorrection();
assertTrue(newVersion.getVersionAsOf().isAfter(initialVersion.getVersionAsOf()));
Long newId = 0L;
for (int i = 0; i < 10; i++) {
Thread.sleep(Timeout.standardTimeoutMillis() / 10);
newId = vp.getFunctionCompilationService().getFunctionCompilationContext().getFunctionInitId();
}
assertTrue(newId > initialId);
assertEquals(Boolean.FALSE, vp.isSuspended(Timeout.standardTimeoutMillis()));
// Shutdown
vpm.stop();
assertFalse(vpm.isRunning());
assertFalse(vp.isRunning());
assertFalse(changeManger.hasListener());
}
}
Diff Result
No diff
Case 18 - ogplatform.rev_73c69_b21a5.UniqueIdentifierTest.java
void test_fudgeEncoding
Left: renamed to test_fudgeEncodingWithVersion and modified body. Also added another similar version, test_fudgeEncodingNoVersion.
Right: renamed to test_fudgeEncoding_version and modified body. Also added another similar version, test_fudgeEncoding_noVersion.
Unstructured reported conflict on signatures
Safe kept all versions
MergeMethods reported a conflict on the whole method test_fudgeEncodingNoVersion. Kept both versions of test_fudgeEncodingWithVersion.
Left added parameter to signature and modified body
Right modified body
Unstructured merged signature and body (different areas)
Safe kept both versions
MergeMethods kept both versions
KeepBothMethods kept both versions
boolean addResult
Left modified parameter type and modified body
Right modified parameter type and modified body
Unstructured reported conflict on signature
Safe kept both versions
MergeMethods reported a conflict on the whole methods
KeepBothMethods kept both versions
Base
package com.orientechnologies.orient.core.index.engine;
import java.util.*;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTreeBucket;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new MapEntryIterator<V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new InverseMapEntryIterator<V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final InverseMapEntryIterator<V> entryIterator = new InverseMapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
if (transformer == null) {
if (entry.value.equals(value))
keySetToRemove.add(entry.key);
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.value);
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.key);
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
addToResult(transformer, result, entry.value, maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
addToResult(transformer, result, entry.value, maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
addToResult(transformer, result, entry.value, maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
final Object key = entry.key;
final V value = entry.value;
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
final Object key = entry.key;
final V value = entry.value;
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
final Object key = entry.key;
final V value = entry.value;
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
/**
 * Ascending iterator over all SB-tree entries. Entries are prefetched in
 * batches of up to 8000; {@code preFetchedValues == null} marks exhaustion.
 */
private static final class MapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
  private LinkedList<Map.Entry<Object, V>> preFetchedValues;
  private final OSBTree<Object, V> sbTree;
  // Boundary key for the next prefetch batch: included only on the first batch,
  // excluded afterwards (it was already returned).
  private Object firstKey;

  MapEntryIterator(OSBTree<Object, V> sbTree) {
    this.sbTree = sbTree;
    if (sbTree.size() == 0) {
      this.preFetchedValues = null;
      return;
    }

    this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
    firstKey = sbTree.firstKey();
    prefetchData(true);
  }

  private void prefetchData(boolean firstTime) {
    sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(final OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        preFetchedValues.add(new Map.Entry<Object, V>() {
          @Override
          public Object getKey() {
            return entry.key;
          }

          @Override
          public V getValue() {
            return entry.value;
          }

          @Override
          public V setValue(V v) {
            throw new UnsupportedOperationException("setValue");
          }
        });

        // Stop the tree scan once the batch is full.
        return preFetchedValues.size() <= 8000;
      }
    });

    if (preFetchedValues.isEmpty())
      preFetchedValues = null;
    else
      firstKey = preFetchedValues.getLast().getKey();
  }

  @Override
  public boolean hasNext() {
    return preFetchedValues != null;
  }

  @Override
  public Map.Entry<Object, V> next() {
    // Fix: honor the Iterator contract. Previously an exhausted iterator threw
    // a NullPointerException from preFetchedValues.removeFirst().
    if (preFetchedValues == null)
      throw new java.util.NoSuchElementException();

    final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
    if (preFetchedValues.isEmpty())
      prefetchData(false);
    return entry;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException("remove");
  }
}
/**
 * Descending iterator over all SB-tree entries. Entries are prefetched in
 * batches of up to 8000; {@code preFetchedValues == null} marks exhaustion.
 */
private static final class InverseMapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
  private final OSBTree<Object, V> sbTree;
  private LinkedList<Map.Entry<Object, V>> preFetchedValues;
  // Boundary key for the next prefetch batch: included only on the first batch,
  // excluded afterwards (it was already returned).
  private Object lastKey;

  InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
    this.sbTree = sbTree;
    if (sbTree.size() == 0) {
      this.preFetchedValues = null;
      return;
    }

    this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
    lastKey = sbTree.lastKey();
    prefetchData(true);
  }

  private void prefetchData(boolean firstTime) {
    sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(final OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        preFetchedValues.add(new Map.Entry<Object, V>() {
          @Override
          public Object getKey() {
            return entry.key;
          }

          @Override
          public V getValue() {
            return entry.value;
          }

          @Override
          public V setValue(V v) {
            throw new UnsupportedOperationException("setValue");
          }
        });

        // Stop the tree scan once the batch is full.
        return preFetchedValues.size() <= 8000;
      }
    });

    if (preFetchedValues.isEmpty())
      preFetchedValues = null;
    else
      lastKey = preFetchedValues.getLast().getKey();
  }

  @Override
  public boolean hasNext() {
    return preFetchedValues != null;
  }

  @Override
  public Map.Entry<Object, V> next() {
    // Fix: honor the Iterator contract. Previously an exhausted iterator threw
    // a NullPointerException from preFetchedValues.removeFirst().
    if (preFetchedValues == null)
      throw new java.util.NoSuchElementException();

    final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
    if (preFetchedValues.isEmpty())
      prefetchData(false);
    return entry;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException("remove");
  }
}
/**
 * Range listener that counts items seen during a tree scan. With a transformer
 * each entry contributes the size of its expansion, otherwise one. The scan is
 * aborted once {@code maxValuesToFetch} (when positive) is reached.
 */
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
  private final ValuesTransformer<V> valuesTransformer;
  private final int maxValuesToFetch;
  private int count;

  private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
    this.valuesTransformer = valuesTransformer;
    this.maxValuesToFetch = maxValuesToFetch;
  }

  @Override
  public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
    count += valuesTransformer == null ? 1 : valuesTransformer.transformFromValue(entry.value).size();
    // Continue the scan while below the cap (a non-positive cap means "no limit").
    return maxValuesToFetch <= 0 || count < maxValuesToFetch;
  }
}
}
package com.orientechnologies.orient.core.index.engine;
import java.util.*;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTreeBucket;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
/** Configures the inherited shared-resource lock from global concurrency and timeout settings. */
public OSBTreeIndexEngine() {
  super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
      .getValueAsInteger(), true);
}
@Override
public void init() {
  // No-op: the SB-tree is set up in create()/load() instead.
}
/** Flushes pending SB-tree changes to storage. */
@Override
public void flush() {
  acquireSharedLock();
  try {
    sbTree.flush();
  } finally {
    releaseSharedLock();
  }
}
/**
 * Creates the on-disk SB-tree for a new index: picks a key serializer and key
 * size from the index definition, persists an identity record into
 * {@code clusterIndexName}, and creates the tree in local storage.
 */
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
    OStreamSerializer valueSerializer, boolean isAutomatic) {
  acquireExclusiveLock();
  try {
    final OBinarySerializer keySerializer;
    if (indexDefinition != null) {
      if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
        // Runtime-keyed definitions carry their own serializer and a single-field key.
        sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
            OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
        keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
      } else {
        // Composite serializer for multi-field keys, type-specific one otherwise.
        if (indexDefinition.getTypes().length > 1) {
          keySerializer = OCompositeKeySerializer.INSTANCE;
        } else {
          keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
        }
        sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
            OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
      }
    } else {
      // No definition: generic single-field key.
      sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
          OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
      keySerializer = new OSimpleKeySerializer();
    }
    // The saved record's RID becomes this engine's identity (see getIdentity()).
    final ORecordBytes identityRecord = new ORecordBytes();
    ODatabaseRecord database = getDatabase();
    final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
    database.save(identityRecord, clusterIndexName);
    identity = identityRecord.getIdentity();
    sbTree.create(indexName, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
  } finally {
    releaseExclusiveLock();
  }
}
/** Deletes the underlying SB-tree and its data file. */
// NOTE(review): this destructive operation only takes the shared lock while
// create()/load() take the exclusive one — confirm this is intentional.
@Override
public void delete() {
  acquireSharedLock();
  try {
    sbTree.delete();
  } finally {
    releaseSharedLock();
  }
}
/**
 * Loads an existing SB-tree with name {@code indexName} from local storage.
 * A fresh tree instance is always created with key size 1 here (contrast with
 * create(), which derives key size from the index definition).
 */
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
  acquireExclusiveLock();
  try {
    sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
    ODatabaseRecord database = getDatabase();
    final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
    sbTree.load(indexName, storageLocalAbstract);
  } finally {
    releaseExclusiveLock();
  }
}
/** Returns true when the key maps to a non-null value (null get() result means absent). */
@Override
public boolean contains(Object key) {
  acquireSharedLock();
  try {
    return sbTree.get(key) != null;
  } finally {
    releaseSharedLock();
  }
}
/** Removes the key from the tree; returns true when an entry was actually removed. */
@Override
public boolean remove(Object key) {
  acquireSharedLock();
  try {
    return sbTree.remove(key) != null;
  } finally {
    releaseSharedLock();
  }
}
/** Returns the RID of the identity record persisted by create(). */
@Override
public ORID getIdentity() {
  acquireSharedLock();
  try {
    return identity;
  } finally {
    releaseSharedLock();
  }
}
/** Removes all entries from the tree, keeping the tree itself. */
@Override
public void clear() {
  acquireSharedLock();
  try {
    sbTree.clear();
  } finally {
    releaseSharedLock();
  }
}
/** Ascending entry iterator over the whole index. */
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
  acquireSharedLock();
  try {
    return new MapEntryIterator<V>(sbTree);
  } finally {
    releaseSharedLock();
  }
}
/** Descending entry iterator over the whole index. */
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
  acquireSharedLock();
  try {
    return new InverseMapEntryIterator<V>(sbTree);
  } finally {
    releaseSharedLock();
  }
}
/** Ascending iterator over the values only, backed by an entry iterator. */
@Override
public Iterator<V> valuesIterator() {
  acquireSharedLock();
  try {
    final MapEntryIterator<V> delegate = new MapEntryIterator<V>(sbTree);
    return new Iterator<V>() {
      @Override
      public boolean hasNext() {
        return delegate.hasNext();
      }

      @Override
      public V next() {
        return delegate.next().getValue();
      }

      @Override
      public void remove() {
        delegate.remove();
      }
    };
  } finally {
    releaseSharedLock();
  }
}
/** Descending iterator over the values only, backed by an inverse entry iterator. */
@Override
public Iterator<V> inverseValuesIterator() {
  acquireSharedLock();
  try {
    final InverseMapEntryIterator<V> delegate = new InverseMapEntryIterator<V>(sbTree);
    return new Iterator<V>() {
      @Override
      public boolean hasNext() {
        return delegate.hasNext();
      }

      @Override
      public V next() {
        return delegate.next().getValue();
      }

      @Override
      public void remove() {
        delegate.remove();
      }
    };
  } finally {
    releaseSharedLock();
  }
}
/** Iterable over the keys in ascending order; each iteration creates a fresh tree iterator. */
@Override
public Iterable<Object> keys() {
  acquireSharedLock();
  try {
    return new Iterable<Object>() {
      @Override
      public Iterator<Object> iterator() {
        return new Iterator<Object>() {
          final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);

          @Override
          public boolean hasNext() {
            return entryIterator.hasNext();
          }

          @Override
          public Object next() {
            return entryIterator.next().getKey();
          }

          @Override
          public void remove() {
            entryIterator.remove();
          }
        };
      }
    };
  } finally {
    releaseSharedLock();
  }
}
@Override
public void unload() {
  // No-op for this engine.
}
@Override
public void startTransaction() {
  // No-op: transactions are handled outside this engine.
}
@Override
public void stopTransaction() {
  // No-op: transactions are handled outside this engine.
}
@Override
public void afterTxRollback() {
  // No-op: nothing to undo at the engine level.
}
@Override
public void afterTxCommit() {
  // No-op: nothing to finalize at the engine level.
}
@Override
public void closeDb() {
  // No-op: the tree is closed explicitly via close().
}
/** Closes the underlying SB-tree. */
@Override
public void close() {
  acquireSharedLock();
  try {
    sbTree.close();
  } finally {
    releaseSharedLock();
  }
}
@Override
public void beforeTxBegin() {
  // No-op: nothing to prepare at the engine level.
}
/** Returns the value stored under {@code key}, or null when absent. */
@Override
public V get(Object key) {
  acquireSharedLock();
  try {
    return sbTree.get(key);
  } finally {
    releaseSharedLock();
  }
}
/** Stores {@code value} under {@code key}, replacing any previous value. */
@Override
public void put(Object key, V value) {
  acquireSharedLock();
  try {
    sbTree.put(key, value);
  } finally {
    releaseSharedLock();
  }
}
/**
 * Scans the whole tree and removes every key whose value equals (or, with a
 * transformer, expands to something equal to) the given identifiable.
 * Returns the number of keys removed.
 */
// NOTE(review): in the transformer case the entire key entry is removed even if
// only one of its expanded identifiables matched — confirm that is intended.
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
  acquireExclusiveLock();
  try {
    final Set<Object> keySetToRemove = new HashSet<Object>();
    if (sbTree.size() == 0)
      return 0;

    final Object firstKey = sbTree.firstKey();
    final Object lastKey = sbTree.lastKey();
    // First pass: collect matching keys (cannot mutate while scanning).
    sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        if (transformer == null) {
          if (entry.value.equals(value))
            keySetToRemove.add(entry.key);
        } else {
          Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.value);
          for (OIdentifiable identifiable : identifiables) {
            if (identifiable.equals(value))
              keySetToRemove.add(entry.key);
          }
        }
        return true;
      }
    });

    // Second pass: remove the collected keys.
    for (Object keyToRemove : keySetToRemove)
      sbTree.remove(keyToRemove);

    return keySetToRemove.size();
  } finally {
    releaseExclusiveLock();
  }
}
/**
 * Collects the identifiables stored in [rangeFrom, rangeTo] (bounds included per
 * flag), stopping once {@code maxValuesToFetch} (when > -1) values are gathered.
 */
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
    final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
    sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        addToResult(transformer, result, entry.value, maxValuesToFetch);
        // Stop the scan once the cap is hit.
        if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
          return false;
        return true;
      }
    });
    return result;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Collects the identifiables stored at keys >= (or > when exclusive)
 * {@code fromKey}, capped at {@code maxValuesToFetch} when > -1.
 */
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
    sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        addToResult(transformer, result, entry.value, maxValuesToFetch);
        if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
          return false;
        return true;
      }
    });
    return result;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Collects the identifiables stored at keys <= (or < when exclusive)
 * {@code toKey}, capped at {@code maxValuesToFetch} when > -1.
 */
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
    sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        addToResult(transformer, result, entry.value, maxValuesToFetch);
        if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
          return false;
        return true;
      }
    });
    return result;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Collects {key, rid} documents for keys >= (or > when exclusive)
 * {@code fromKey}, capped at {@code maxEntriesToFetch} when > -1.
 */
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<ODocument> result = new ODocumentFieldsHashSet();
    sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        final Object key = entry.key;
        final V value = entry.value;

        addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
        if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
          return false;
        return true;
      }
    });
    return result;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Collects {key, rid} documents for keys <= (or < when exclusive)
 * {@code toKey}, capped at {@code maxEntriesToFetch} when > -1.
 */
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<ODocument> result = new ODocumentFieldsHashSet();
    sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        final Object key = entry.key;
        final V value = entry.value;

        addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
        if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
          return false;
        return true;
      }
    });
    return result;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Collects {key, rid} documents for keys in the given range; the single
 * {@code inclusive} flag applies to BOTH bounds. Capped at
 * {@code maxEntriesToFetch} when > -1.
 */
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<ODocument> result = new ODocumentFieldsHashSet();
    sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        final Object key = entry.key;
        final V value = entry.value;

        addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
        if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
          return false;
        return true;
      }
    });
    return result;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Returns the number of items in the index. Without a transformer this is the
 * tree size; with one, the whole tree is scanned and each value's expansion is
 * counted (0 for an empty tree).
 */
@Override
public long size(final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    if (transformer == null)
      return sbTree.size();
    else {
      // -1 cap = count everything.
      final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);

      final Object firstKey = sbTree.firstKey();
      final Object lastKey = sbTree.lastKey();

      if (firstKey != null && lastKey != null) {
        sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
        return counter.count;
      }

      return 0;
    }
  } finally {
    releaseSharedLock();
  }
}
/**
 * Counts entries from {@code rangeFrom}; when {@code rangeTo} is non-null the
 * count is bounded above as well. {@code maxValuesToFetch} caps the count when
 * positive; a transformer makes each value count as its expansion size.
 */
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
    ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);

    if (rangeTo != null)
      sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
    else
      sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);

    return itemsCounter.count;
  } finally {
    releaseSharedLock();
  }
}
/** This engine is backed by an ordered SB-tree, so range queries are supported. */
@Override
public boolean hasRangeQuerySupport() {
  return true;
}
/** Returns the database bound to the current thread. */
private ODatabaseRecord getDatabase() {
  return ODatabaseRecordThreadLocal.INSTANCE.get();
}
/**
 * Adds {@code value} to {@code result}. With a transformer every identifiable
 * it expands to is added (stopping at {@code maxValuesToFetch} when > -1);
 * without one the value itself is cast to {@link OIdentifiable}.
 */
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
  if (transformer != null) {
    Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
    for (OIdentifiable transformedValue : transformResult) {
      result.add(transformedValue);
      if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
        return;
    }
  } else
    result.add((OIdentifiable) value);
}
/**
 * Adds one non-dirty {key, rid} document per identifiable produced by
 * {@code value}, stopping once {@code maxValuesToFetch} (when > -1) documents
 * are collected.
 */
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
  if (transformer != null) {
    Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
    for (OIdentifiable transformedValue : transformResult) {
      final ODocument document = new ODocument();
      document.field("key", key);
      document.field("rid", transformedValue.getIdentity());
      document.unsetDirty();
      result.add(document);
      if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
        return;
    }
  } else {
    final ODocument document = new ODocument();
    document.field("key", key);
    document.field("rid", ((OIdentifiable) value).getIdentity());
    document.unsetDirty();
    result.add(document);
  }
}
/**
 * Ascending iterator over all SB-tree entries. Entries are prefetched in
 * batches of up to 8000; {@code preFetchedValues == null} marks exhaustion.
 */
private static final class MapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
  private LinkedList<Map.Entry<Object, V>> preFetchedValues;
  private final OSBTree<Object, V> sbTree;
  // Boundary key for the next batch: included only on the first batch, excluded after.
  private Object firstKey;

  MapEntryIterator(OSBTree<Object, V> sbTree) {
    this.sbTree = sbTree;
    if (sbTree.size() == 0) {
      this.preFetchedValues = null;
      return;
    }
    this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
    firstKey = sbTree.firstKey();
    prefetchData(true);
  }

  private void prefetchData(boolean firstTime) {
    sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(final OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        // Wrap the tree entry in an immutable Map.Entry view.
        preFetchedValues.add(new Map.Entry<Object, V>() {
          @Override
          public Object getKey() {
            return entry.key;
          }

          @Override
          public V getValue() {
            return entry.value;
          }

          @Override
          public V setValue(V v) {
            throw new UnsupportedOperationException("setValue");
          }
        });
        // Stop the tree scan once the batch is full.
        return preFetchedValues.size() <= 8000;
      }
    });
    if (preFetchedValues.isEmpty())
      preFetchedValues = null;
    else
      firstKey = preFetchedValues.getLast().getKey();
  }

  @Override
  public boolean hasNext() {
    return preFetchedValues != null;
  }

  // NOTE(review): when exhausted this throws NullPointerException rather than
  // NoSuchElementException, deviating from the Iterator contract.
  @Override
  public Map.Entry<Object, V> next() {
    final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
    if (preFetchedValues.isEmpty())
      prefetchData(false);
    return entry;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException("remove");
  }
}
/**
 * Descending iterator over all SB-tree entries. Entries are prefetched in
 * batches of up to 8000; {@code preFetchedValues == null} marks exhaustion.
 */
private static final class InverseMapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
  private final OSBTree<Object, V> sbTree;
  private LinkedList<Map.Entry<Object, V>> preFetchedValues;
  // Boundary key for the next batch: included only on the first batch, excluded after.
  private Object lastKey;

  InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
    this.sbTree = sbTree;
    if (sbTree.size() == 0) {
      this.preFetchedValues = null;
      return;
    }
    this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
    lastKey = sbTree.lastKey();
    prefetchData(true);
  }

  private void prefetchData(boolean firstTime) {
    sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(final OSBTreeBucket.SBTreeEntry<Object, V> entry) {
        // Wrap the tree entry in an immutable Map.Entry view.
        preFetchedValues.add(new Map.Entry<Object, V>() {
          @Override
          public Object getKey() {
            return entry.key;
          }

          @Override
          public V getValue() {
            return entry.value;
          }

          @Override
          public V setValue(V v) {
            throw new UnsupportedOperationException("setValue");
          }
        });
        // Stop the tree scan once the batch is full.
        return preFetchedValues.size() <= 8000;
      }
    });
    if (preFetchedValues.isEmpty())
      preFetchedValues = null;
    else
      lastKey = preFetchedValues.getLast().getKey();
  }

  @Override
  public boolean hasNext() {
    return preFetchedValues != null;
  }

  // NOTE(review): when exhausted this throws NullPointerException rather than
  // NoSuchElementException, deviating from the Iterator contract.
  @Override
  public Map.Entry<Object, V> next() {
    final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
    if (preFetchedValues.isEmpty())
      prefetchData(false);
    return entry;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException("remove");
  }
}
/**
 * Range listener that counts items during a tree scan. With a transformer each
 * entry contributes the size of its expansion, otherwise one. Aborts the scan
 * once {@code maxValuesToFetch} (when positive) is reached.
 */
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
  private final ValuesTransformer<V> valuesTransformer;
  private final int maxValuesToFetch;

  private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
    this.valuesTransformer = valuesTransformer;
    this.maxValuesToFetch = maxValuesToFetch;
  }

  // Running total, read by callers after the scan completes.
  private int count;

  @Override
  public boolean addResult(OSBTreeBucket.SBTreeEntry<Object, V> entry) {
    if (valuesTransformer != null)
      count += valuesTransformer.transformFromValue(entry.value).size();
    else
      count++;

    // False stops the scan once the cap is hit (non-positive cap = unlimited).
    if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
      return false;

    return true;
  }
}
}
Left
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new MapEntryIterator<V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new InverseMapEntryIterator<V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final InverseMapEntryIterator<V> entryIterator = new InverseMapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
/**
 * Builds key/rid documents for all entries with keys greater than {@code fromKey}
 * (or greater-or-equal when {@code isInclusive} is set).
 *
 * @param maxEntriesToFetch upper bound on returned documents; any negative value means unbounded
 * @param transformer       optional expansion of a stored value into several identifiables
 */
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<ODocument> entries = new ODocumentFieldsHashSet();
    final OSBTree.RangeResultListener<Object, V> collector = new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(Map.Entry<Object, V> treeEntry) {
        addToEntriesResult(transformer, entries, treeEntry.getKey(), treeEntry.getValue(), maxEntriesToFetch);
        return maxEntriesToFetch <= -1 || entries.size() != maxEntriesToFetch;
      }
    };
    sbTree.loadEntriesMajor(fromKey, isInclusive, collector);
    return entries;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Builds key/rid documents for all entries with keys smaller than {@code toKey}
 * (or smaller-or-equal when {@code isInclusive} is set).
 *
 * @param maxEntriesToFetch upper bound on returned documents; any negative value means unbounded
 * @param transformer       optional expansion of a stored value into several identifiables
 */
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<ODocument> entries = new ODocumentFieldsHashSet();
    final OSBTree.RangeResultListener<Object, V> collector = new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(Map.Entry<Object, V> treeEntry) {
        addToEntriesResult(transformer, entries, treeEntry.getKey(), treeEntry.getValue(), maxEntriesToFetch);
        return maxEntriesToFetch <= -1 || entries.size() != maxEntriesToFetch;
      }
    };
    sbTree.loadEntriesMinor(toKey, isInclusive, collector);
    return entries;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Builds key/rid documents for all entries with keys in {@code rangeFrom..rangeTo}.
 * The single {@code inclusive} flag applies to both range bounds.
 *
 * @param maxEntriesToFetch upper bound on returned documents; any negative value means unbounded
 * @param transformer       optional expansion of a stored value into several identifiables
 */
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
    final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final Set<ODocument> entries = new ODocumentFieldsHashSet();
    final OSBTree.RangeResultListener<Object, V> collector = new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(Map.Entry<Object, V> treeEntry) {
        addToEntriesResult(transformer, entries, treeEntry.getKey(), treeEntry.getValue(), maxEntriesToFetch);
        return maxEntriesToFetch <= -1 || entries.size() != maxEntriesToFetch;
      }
    };
    sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, collector);
    return entries;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Returns the number of logical items in the index.
 * Without a transformer every tree entry counts as one item; with a transformer
 * each stored value is expanded and its elements are counted individually.
 */
@Override
public long size(final ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    if (transformer == null)
      return sbTree.size();

    final Object firstKey = sbTree.firstKey();
    final Object lastKey = sbTree.lastKey();
    // An empty tree yields no boundary keys, so there is nothing to count.
    if (firstKey == null || lastKey == null)
      return 0;

    final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
    sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
    return counter.count;
  } finally {
    releaseSharedLock();
  }
}
/**
 * Counts items whose keys fall in the requested range, stopping early once
 * {@code maxValuesToFetch} (if positive) has been reached.
 * A {@code null} {@code rangeTo} means "count everything from {@code rangeFrom} upward".
 */
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
    ValuesTransformer<V> transformer) {
  acquireSharedLock();
  try {
    final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, maxValuesToFetch);
    if (rangeTo == null)
      sbTree.loadEntriesMajor(rangeFrom, fromInclusive, counter);
    else
      sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, counter);
    return counter.count;
  } finally {
    releaseSharedLock();
  }
}
@Override
public boolean hasRangeQuerySupport() {
  // SB-tree keys are ordered, so range queries are always supported by this engine.
  return true;
}
// Returns the database bound to the current thread (thread-local lookup).
private ODatabaseRecord getDatabase() {
  return ODatabaseRecordThreadLocal.INSTANCE.get();
}
/**
 * Adds the identifiables represented by {@code value} to {@code result}.
 * With a transformer the value expands into several identifiables; the loop stops
 * as soon as the result reaches {@code maxValuesToFetch} (ignored when negative).
 */
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
  if (transformer == null) {
    result.add((OIdentifiable) value);
    return;
  }
  for (OIdentifiable rid : transformer.transformFromValue(value)) {
    result.add(rid);
    if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
      return;
  }
}
/**
 * Adds key/rid documents for {@code value} to {@code result}.
 * Each produced document carries a "key" field and a "rid" field and is marked
 * not-dirty before insertion. With a transformer the value expands into several
 * rids; the loop stops once {@code maxValuesToFetch} (ignored when negative) is reached.
 */
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
  if (transformer == null) {
    final ODocument document = new ODocument();
    document.field("key", key);
    document.field("rid", ((OIdentifiable) value).getIdentity());
    document.unsetDirty();
    result.add(document);
    return;
  }
  for (OIdentifiable rid : transformer.transformFromValue(value)) {
    final ODocument document = new ODocument();
    document.field("key", key);
    document.field("rid", rid.getIdentity());
    document.unsetDirty();
    result.add(document);
    if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
      return;
  }
}
/**
 * Ascending iterator over all tree entries, prefetching batches of roughly
 * 8000 entries via {@link OSBTree#loadEntriesMajor} to avoid a per-entry tree walk.
 * Not thread-safe; {@code remove()} is unsupported.
 */
private static final class MapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
  // Prefetched batch; null is the exhaustion marker (also used by hasNext()).
  private LinkedList<Map.Entry<Object, V>> preFetchedValues;
  private final OSBTree<Object, V> sbTree;
  // Key from which the next batch resumes; exclusive on every batch after the first.
  private Object firstKey;

  MapEntryIterator(OSBTree<Object, V> sbTree) {
    this.sbTree = sbTree;
    if (sbTree.size() == 0) {
      this.preFetchedValues = null;
      return;
    }
    this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
    firstKey = sbTree.firstKey();
    prefetchData(true);
  }

  // Loads the next batch starting at firstKey (inclusive only on the first call).
  private void prefetchData(boolean firstTime) {
    sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(final Map.Entry<Object, V> entry) {
        preFetchedValues.add(entry);
        // Stop the range scan once the batch is full.
        return preFetchedValues.size() <= 8000;
      }
    });
    if (preFetchedValues.isEmpty())
      preFetchedValues = null;
    else
      firstKey = preFetchedValues.getLast().getKey();
  }

  @Override
  public boolean hasNext() {
    return preFetchedValues != null;
  }

  @Override
  public Map.Entry<Object, V> next() {
    // Honor the Iterator contract: previously this dereferenced null and threw NPE.
    if (preFetchedValues == null)
      throw new java.util.NoSuchElementException();
    final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
    if (preFetchedValues.isEmpty())
      prefetchData(false);
    return entry;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException("remove");
  }
}
/**
 * Descending iterator over all tree entries, prefetching batches of roughly
 * 8000 entries via {@link OSBTree#loadEntriesMinor} to avoid a per-entry tree walk.
 * Not thread-safe; {@code remove()} is unsupported.
 */
private static final class InverseMapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
  private final OSBTree<Object, V> sbTree;
  // Prefetched batch; null is the exhaustion marker (also used by hasNext()).
  private LinkedList<Map.Entry<Object, V>> preFetchedValues;
  // Key from which the next batch resumes; exclusive on every batch after the first.
  private Object lastKey;

  InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
    this.sbTree = sbTree;
    if (sbTree.size() == 0) {
      this.preFetchedValues = null;
      return;
    }
    this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
    lastKey = sbTree.lastKey();
    prefetchData(true);
  }

  // Loads the next batch ending at lastKey (inclusive only on the first call).
  private void prefetchData(boolean firstTime) {
    sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(final Map.Entry<Object, V> entry) {
        preFetchedValues.add(entry);
        // Stop the range scan once the batch is full.
        return preFetchedValues.size() <= 8000;
      }
    });
    if (preFetchedValues.isEmpty())
      preFetchedValues = null;
    else
      lastKey = preFetchedValues.getLast().getKey();
  }

  @Override
  public boolean hasNext() {
    return preFetchedValues != null;
  }

  @Override
  public Map.Entry<Object, V> next() {
    // Honor the Iterator contract: previously this dereferenced null and threw NPE.
    if (preFetchedValues == null)
      throw new java.util.NoSuchElementException();
    final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
    if (preFetchedValues.isEmpty())
      prefetchData(false);
    return entry;
  }

  @Override
  public void remove() {
    throw new UnsupportedOperationException("remove");
  }
}
/**
 * Range listener that counts items, expanding each stored value through the
 * transformer when one is present. Aborts the scan once {@code maxValuesToFetch}
 * items have been seen (a non-positive limit disables early termination).
 */
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
  private final ValuesTransformer<V> valuesTransformer;
  private final int maxValuesToFetch;
  private int count;

  private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
    this.valuesTransformer = valuesTransformer;
    this.maxValuesToFetch = maxValuesToFetch;
  }

  @Override
  public boolean addResult(Map.Entry<Object, V> entry) {
    count += valuesTransformer == null ? 1 : valuesTransformer.transformFromValue(entry.getValue()).size();
    // Keep scanning while no (positive) limit is set or the limit is not yet reached.
    return maxValuesToFetch <= 0 || count < maxValuesToFetch;
  }
}
}
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
/**
 * Creates the on-disk SB-tree for this index and registers an identity record
 * in {@code clusterIndexName} whose RID identifies the index.
 *
 * Key serializer selection: runtime-keyed definitions supply their own serializer;
 * composite keys use {@link OCompositeKeySerializer}; single-typed keys get a
 * type-specific serializer; a null definition falls back to {@link OSimpleKeySerializer}.
 */
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
    OStreamSerializer valueSerializer, boolean isAutomatic) {
  acquireExclusiveLock();
  try {
    final OBinarySerializer keySerializer;
    if (indexDefinition != null) {
      if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
        // Runtime-keyed indexes always have a key size of 1 and carry their own serializer.
        sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
            OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
        keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
      } else {
        if (indexDefinition.getTypes().length > 1) {
          keySerializer = OCompositeKeySerializer.INSTANCE;
        } else {
          keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
        }
        // Key size mirrors the number of indexed types.
        sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
            OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
      }
    } else {
      sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
          OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
      keySerializer = new OSimpleKeySerializer();
    }
    // Persist a marker record whose identity is later returned by getIdentity().
    final ORecordBytes identityRecord = new ORecordBytes();
    ODatabaseRecord database = getDatabase();
    final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
    database.save(identityRecord, clusterIndexName);
    identity = identityRecord.getIdentity();
    sbTree.create(indexName, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
  } finally {
    releaseExclusiveLock();
  }
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
/**
 * Re-opens an existing SB-tree for this index.
 * The key size is reconstructed from the index definition (1 for null or
 * runtime-keyed definitions, otherwise the number of indexed types).
 */
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
  acquireExclusiveLock();
  try {
    final int keySize;
    if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
      keySize = 1;
    else
      keySize = indexDefinition.getTypes().length;
    sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
        OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
    ODatabaseRecord database = getDatabase();
    // NOTE(review): create() casts getStorage() directly while load() goes through
    // getUnderlying() — presumably to unwrap a proxy storage here; confirm this asymmetry is intended.
    final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
    sbTree.load(indexName, storageLocalAbstract);
  } finally {
    releaseExclusiveLock();
  }
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new MapEntryIterator<V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new InverseMapEntryIterator<V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final InverseMapEntryIterator<V> entryIterator = new InverseMapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final MapEntryIterator<V> entryIterator = new MapEntryIterator<V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
/**
 * Removes every index entry whose stored value equals (or, via the transformer,
 * expands to) the given identifiable.
 *
 * Works in two phases under the exclusive lock: first a full scan between the
 * tree's boundary keys collects the matching keys, then each collected key is
 * removed (deleting during the range scan would invalidate the iteration).
 *
 * @return the number of keys removed
 */
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
  acquireExclusiveLock();
  try {
    final Set<Object> keySetToRemove = new HashSet<Object>();
    if (sbTree.size() == 0)
      return 0;
    final Object firstKey = sbTree.firstKey();
    final Object lastKey = sbTree.lastKey();
    sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
      @Override
      public boolean addResult(Map.Entry<Object, V> entry) {
        if (transformer == null) {
          if (entry.getValue().equals(value))
            keySetToRemove.add(entry.getKey());
        } else {
          Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
          for (OIdentifiable identifiable : identifiables) {
            if (identifiable.equals(value))
              keySetToRemove.add(entry.getKey());
          }
        }
        // Always continue: the whole tree must be scanned to find every match.
        return true;
      }
    });
    for (Object keyToRemove : keySetToRemove)
      sbTree.remove(keyToRemove);
    return keySetToRemove.size();
  } finally {
    releaseExclusiveLock();
  }
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
private static final class MapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
private static final class InverseMapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
Right
/*
* Copyright 2010-2012 Luca Garulli (l.garulli(at)orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
/**
 * Creates the on-disk SB-tree for this index and registers an identity record
 * in {@code clusterIndexName} whose RID identifies the index.
 *
 * Key serializer selection: runtime-keyed definitions supply their own serializer;
 * composite keys use {@link OCompositeKeySerializer}; single-typed keys get a
 * type-specific serializer; a null definition falls back to {@link OSimpleKeySerializer}.
 */
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
    OStreamSerializer valueSerializer, boolean isAutomatic) {
  acquireExclusiveLock();
  try {
    final OBinarySerializer keySerializer;
    if (indexDefinition != null) {
      if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
        // Runtime-keyed indexes always have a key size of 1 and carry their own serializer.
        sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
            OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
        keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
      } else {
        if (indexDefinition.getTypes().length > 1) {
          keySerializer = OCompositeKeySerializer.INSTANCE;
        } else {
          keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
        }
        // Key size mirrors the number of indexed types.
        sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
            OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
      }
    } else {
      sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
          OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
      keySerializer = new OSimpleKeySerializer();
    }
    // Persist a marker record whose identity is later returned by getIdentity().
    final ORecordBytes identityRecord = new ORecordBytes();
    ODatabaseRecord database = getDatabase();
    final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
    database.save(identityRecord, clusterIndexName);
    identity = identityRecord.getIdentity();
    // NOTE(review): this revision passes an extra leading 0 to create() — presumably a
    // partition/cluster id added to the OSBTree API in this branch; verify against OSBTree.create.
    sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
  } finally {
    releaseExclusiveLock();
  }
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
/**
 * Re-opens an existing SB-tree for this index.
 * This revision drops the index-definition parameter and always uses a key size of 1.
 */
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
  acquireExclusiveLock();
  try {
    sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
    ODatabaseRecord database = getDatabase();
    final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
    // NOTE(review): the extra 0 mirrors the one passed to sbTree.create() in this
    // revision — presumably a partition/cluster id; verify against OSBTree.load.
    sbTree.load(indexName, 0, storageLocalAbstract);
  } finally {
    releaseExclusiveLock();
  }
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
/**
 * Returns an ascending iterator over all index entries, backed by the shared
 * {@link OSBTreeMapEntryIterator} helper (this revision replaces the private
 * nested iterator used by the other branch).
 */
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
  acquireSharedLock();
  try {
    return new OSBTreeMapEntryIterator<Object, V>(sbTree);
  } finally {
    releaseSharedLock();
  }
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
private static final class ItemsCounter<V> implements OTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
/*
* Copyright 2010-2012 Luca Garulli (l.garulli(at)orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
private static final class ItemsCounter<V> implements OTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
MergeMethods
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine <V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
<<<<<<< MINE
private static final class InverseMapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
private static final class MapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
private static final class ItemsCounter <V> implements OTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
<<<<<<< MINE
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
=======
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
>>>>>>> YOURS
}
}
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine <V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
<<<<<<< MINE
private static final class InverseMapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
private static final class MapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
// Adds one key/rid document per identifiable produced by the stored value.
// Each document carries the fields "key" and "rid" and is marked clean so it
// is not treated as a pending change. With a transformer the value may expand
// into several rids; collection stops once the fetch limit (when >= 0) is hit.
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer == null) {
final ODocument entry = new ODocument();
entry.field("key", key);
entry.field("rid", ((OIdentifiable) value).getIdentity());
entry.unsetDirty();
result.add(entry);
return;
}
for (final OIdentifiable rid : transformer.transformFromValue(value)) {
final ODocument entry = new ODocument();
entry.field("key", key);
entry.field("rid", rid.getIdentity());
entry.unsetDirty();
result.add(entry);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
}
private static final class ItemsCounter <V> implements OTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
<<<<<<< MINE
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
=======
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
>>>>>>> YOURS
}
}
KeepBothMethods
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine <V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
<<<<<<< MINE
private static final class InverseMapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
private static final class MapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
private static final class ItemsCounter <V> implements OTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine <V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
// NOTE(review): resolved two stacked merge conflicts in which one branch added
// private InverseMapEntryIterator / MapEntryIterator inner classes and the other
// branch added nothing. The delete side was kept: this class already iterates via
// the imported OSBTreeMapEntryIterator and OSBTreeInverseMapEntryIterator, so the
// inner copies would have been unused duplicates. Confirm no external code
// referenced them before merging.
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
// Creates the on-disk tree: picks a key serializer from the index definition
// (runtime-key, composite, or single declared type), saves an identity marker
// record into the given cluster, then materialises the SB-tree files.
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
// Composite keys need the composite serializer; otherwise serialize by
// the single declared key type.
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
// No definition: fall back to a generic single-key serializer.
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
// Intentionally empty: no state to prepare before a transaction begins.
public void beforeTxBegin() {
}
@Override
/**
 * Looks up the value mapped to {@code key} in the SB-tree.
 * Returns null when the key is absent (delegates directly to sbTree.get).
 */
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
/** Stores {@code value} under {@code key}, replacing any previous mapping. */
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
/**
 * Returns the number of logical entries. Without a transformer this is the
 * tree size; with one, every value is expanded and the expanded items are
 * counted by scanning the full key range.
 */
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
// -1 disables the counter's fetch limit: count everything.
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
// Empty tree (no first/last key): nothing to count.
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
/**
 * Counts entries from {@code rangeFrom} up to {@code rangeTo} (or open-ended
 * when rangeTo is null), expanding values through {@code transformer} when
 * provided, and stopping at {@code maxValuesToFetch} when positive.
 */
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
// SB-trees are ordered, so range queries are supported natively.
public boolean hasRangeQuerySupport() {
return true;
}
// Convenience accessor for the thread-bound database instance.
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
/**
 * Adds {@code value} to {@code result}. Without a transformer the raw value
 * is added as an OIdentifiable; with one, each transformed identifiable is
 * added until the set reaches {@code maxValuesToFetch} (-1 = no limit).
 */
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
  if (transformer == null) {
    result.add((OIdentifiable) value);
    return;
  }
  for (OIdentifiable transformed : transformer.transformFromValue(value)) {
    result.add(transformed);
    if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
      return;
  }
}
/**
 * Adds key/rid documents to {@code result}. Without a transformer a single
 * document is built from the raw value; with one, a document is added per
 * transformed identifiable until {@code maxValuesToFetch} is reached
 * (-1 = no limit).
 */
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
  if (transformer == null) {
    result.add(createEntryDocument(key, (OIdentifiable) value));
    return;
  }
  for (OIdentifiable transformed : transformer.transformFromValue(value)) {
    result.add(createEntryDocument(key, transformed));
    if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
      return;
  }
}

// Builds the {key, rid} result document; unsetDirty matches prior behavior.
private ODocument createEntryDocument(Object key, OIdentifiable rid) {
  final ODocument document = new ODocument();
  document.field("key", key);
  document.field("rid", rid.getIdentity());
  document.unsetDirty();
  return document;
}
/**
 * Range listener that counts entries, expanding each value through the
 * optional ValuesTransformer, and stops the scan once a positive
 * maxValuesToFetch has been reached.
 *
 * Improvement: the two addResult overloads previously duplicated the counting
 * logic verbatim; both now delegate to a single helper.
 */
private static final class ItemsCounter <V> implements OTree.RangeResultListener<Object, V> {
  private final ValuesTransformer<V> valuesTransformer;
  private final int maxValuesToFetch;
  // Read directly by the enclosing class after the scan completes.
  private int count;

  private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
    this.valuesTransformer = valuesTransformer;
    this.maxValuesToFetch = maxValuesToFetch;
  }

  @Override
  public boolean addResult(Map.Entry<Object, V> entry) {
    return countValue(entry.getValue());
  }

  @Override
  public boolean addResult(OTree.BucketEntry<Object, V> entry) {
    return countValue(entry.getValue());
  }

  // Returns false (stop scanning) once the limit is reached.
  private boolean countValue(V value) {
    if (valuesTransformer != null)
      count += valuesTransformer.transformFromValue(value).size();
    else
      count++;
    return !(maxValuesToFetch > 0 && count >= maxValuesToFetch);
  }
}
}
Safe
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine <V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
<<<<<<< MINE
private static final class InverseMapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
private static final class MapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
/**
 * Range listener that counts entries, expanding each value through the
 * optional ValuesTransformer, and stops the scan once a positive
 * maxValuesToFetch has been reached.
 *
 * Improvement: the two addResult overloads previously duplicated the counting
 * logic verbatim; both now delegate to a single helper.
 */
private static final class ItemsCounter <V> implements OTree.RangeResultListener<Object, V> {
  private final ValuesTransformer<V> valuesTransformer;
  private final int maxValuesToFetch;
  // Read directly by the enclosing class after the scan completes.
  private int count;

  private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
    this.valuesTransformer = valuesTransformer;
    this.maxValuesToFetch = maxValuesToFetch;
  }

  @Override
  public boolean addResult(Map.Entry<Object, V> entry) {
    return countValue(entry.getValue());
  }

  @Override
  public boolean addResult(OTree.BucketEntry<Object, V> entry) {
    return countValue(entry.getValue());
  }

  // Returns false (stop scanning) once the limit is reached.
  private boolean countValue(V value) {
    if (valuesTransformer != null)
      count += valuesTransformer.transformFromValue(value).size();
    else
      count++;
    return !(maxValuesToFetch > 0 && count >= maxValuesToFetch);
  }
}
}
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine <V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
<<<<<<< MINE
private static final class InverseMapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
private static final class MapEntryIterator <V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
=======
>>>>>>> YOURS
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void load(ORID indexRid, String indexName, boolean isAutomatic) {
acquireExclusiveLock();
try {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1, OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
private static final class ItemsCounter <V> implements OTree.RangeResultListener<Object, V> {
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
public boolean addResult(Map.Entry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
@Override
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
Unstructured
/*
* Copyright 2010-2012 Luca Garulli (l.garulli(at)orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
<<<<<<< MINE
import java.util.LinkedList;
=======
>>>>>>> YOURS
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
<<<<<<< MINE
private static final class MapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
private static final class InverseMapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
=======
private static final class ItemsCounter<V> implements OTree.RangeResultListener<Object, V> {
>>>>>>> YOURS
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}/*
* Copyright 2010-2012 Luca Garulli (l.garulli(at)orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.index.engine;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
<<<<<<< MINE
import java.util.LinkedList;
=======
>>>>>>> YOURS
import java.util.Map;
import java.util.Set;
import com.orientechnologies.common.concur.resource.OSharedResourceAdaptiveExternal;
import com.orientechnologies.common.serialization.types.OBinarySerializer;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.db.ODatabaseRecordThreadLocal;
import com.orientechnologies.orient.core.db.record.ODatabaseRecord;
import com.orientechnologies.orient.core.db.record.OIdentifiable;
import com.orientechnologies.orient.core.id.ORID;
import com.orientechnologies.orient.core.index.ODocumentFieldsHashSet;
import com.orientechnologies.orient.core.index.OIndexDefinition;
import com.orientechnologies.orient.core.index.OIndexEngine;
import com.orientechnologies.orient.core.index.ORuntimeKeyIndexDefinition;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeInverseMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OSBTreeMapEntryIterator;
import com.orientechnologies.orient.core.index.sbtree.OTree;
import com.orientechnologies.orient.core.index.sbtree.local.OSBTree;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.record.impl.ORecordBytes;
import com.orientechnologies.orient.core.serialization.serializer.binary.OBinarySerializerFactory;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OCompositeKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.binary.impl.index.OSimpleKeySerializer;
import com.orientechnologies.orient.core.serialization.serializer.stream.OStreamSerializer;
import com.orientechnologies.orient.core.storage.impl.local.OStorageLocalAbstract;
/**
* @author Andrey Lomakin
* @since 8/30/13
*/
public class OSBTreeIndexEngine<V> extends OSharedResourceAdaptiveExternal implements OIndexEngine<V> {
public static final String DATA_FILE_EXTENSION = ".sbt";
private ORID identity;
private OSBTree<Object, V> sbTree;
public OSBTreeIndexEngine() {
super(OGlobalConfiguration.ENVIRONMENT_CONCURRENT.getValueAsBoolean(), OGlobalConfiguration.MVRBTREE_TIMEOUT
.getValueAsInteger(), true);
}
@Override
public void init() {
}
@Override
public void flush() {
acquireSharedLock();
try {
sbTree.flush();
} finally {
releaseSharedLock();
}
}
@Override
public void create(String indexName, OIndexDefinition indexDefinition, String clusterIndexName,
OStreamSerializer valueSerializer, boolean isAutomatic) {
acquireExclusiveLock();
try {
final OBinarySerializer keySerializer;
if (indexDefinition != null) {
if (indexDefinition instanceof ORuntimeKeyIndexDefinition) {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = ((ORuntimeKeyIndexDefinition) indexDefinition).getSerializer();
} else {
if (indexDefinition.getTypes().length > 1) {
keySerializer = OCompositeKeySerializer.INSTANCE;
} else {
keySerializer = OBinarySerializerFactory.INSTANCE.getObjectSerializer(indexDefinition.getTypes()[0]);
}
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, indexDefinition.getTypes().length,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
}
} else {
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, 1,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
keySerializer = new OSimpleKeySerializer();
}
final ORecordBytes identityRecord = new ORecordBytes();
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage();
database.save(identityRecord, clusterIndexName);
identity = identityRecord.getIdentity();
sbTree.create(indexName, 0, keySerializer, (OBinarySerializer<V>) valueSerializer, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public void delete() {
acquireSharedLock();
try {
sbTree.delete();
} finally {
releaseSharedLock();
}
}
@Override
public void load(ORID indexRid, String indexName, OIndexDefinition indexDefinition, boolean isAutomatic) {
acquireExclusiveLock();
try {
final int keySize;
if (indexDefinition == null || indexDefinition instanceof ORuntimeKeyIndexDefinition)
keySize = 1;
else
keySize = indexDefinition.getTypes().length;
sbTree = new OSBTree<Object, V>(DATA_FILE_EXTENSION, keySize,
OGlobalConfiguration.INDEX_DURABLE_IN_NON_TX_MODE.getValueAsBoolean());
ODatabaseRecord database = getDatabase();
final OStorageLocalAbstract storageLocalAbstract = (OStorageLocalAbstract) database.getStorage().getUnderlying();
sbTree.load(indexName, 0, storageLocalAbstract);
} finally {
releaseExclusiveLock();
}
}
@Override
public boolean contains(Object key) {
acquireSharedLock();
try {
return sbTree.get(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public boolean remove(Object key) {
acquireSharedLock();
try {
return sbTree.remove(key) != null;
} finally {
releaseSharedLock();
}
}
@Override
public ORID getIdentity() {
acquireSharedLock();
try {
return identity;
} finally {
releaseSharedLock();
}
}
@Override
public void clear() {
acquireSharedLock();
try {
sbTree.clear();
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> iterator() {
acquireSharedLock();
try {
return new OSBTreeMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<Map.Entry<Object, V>> inverseIterator() {
acquireSharedLock();
try {
return new OSBTreeInverseMapEntryIterator<Object, V>(sbTree);
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> valuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterator<V> inverseValuesIterator() {
acquireSharedLock();
try {
return new Iterator<V>() {
private final OSBTreeInverseMapEntryIterator<Object, V> entryIterator = new OSBTreeInverseMapEntryIterator<Object, V>(
sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public V next() {
return entryIterator.next().getValue();
}
@Override
public void remove() {
entryIterator.remove();
}
};
} finally {
releaseSharedLock();
}
}
@Override
public Iterable<Object> keys() {
acquireSharedLock();
try {
return new Iterable<Object>() {
@Override
public Iterator<Object> iterator() {
return new Iterator<Object>() {
final OSBTreeMapEntryIterator<Object, V> entryIterator = new OSBTreeMapEntryIterator<Object, V>(sbTree);
@Override
public boolean hasNext() {
return entryIterator.hasNext();
}
@Override
public Object next() {
return entryIterator.next().getKey();
}
@Override
public void remove() {
entryIterator.remove();
}
};
}
};
} finally {
releaseSharedLock();
}
}
@Override
public void unload() {
}
@Override
public void startTransaction() {
}
@Override
public void stopTransaction() {
}
@Override
public void afterTxRollback() {
}
@Override
public void afterTxCommit() {
}
@Override
public void closeDb() {
}
@Override
public void close() {
acquireSharedLock();
try {
sbTree.close();
} finally {
releaseSharedLock();
}
}
@Override
public void beforeTxBegin() {
}
@Override
public V get(Object key) {
acquireSharedLock();
try {
return sbTree.get(key);
} finally {
releaseSharedLock();
}
}
@Override
public void put(Object key, V value) {
acquireSharedLock();
try {
sbTree.put(key, value);
} finally {
releaseSharedLock();
}
}
@Override
public int removeValue(final OIdentifiable value, final ValuesTransformer<V> transformer) {
acquireExclusiveLock();
try {
final Set<Object> keySetToRemove = new HashSet<Object>();
if (sbTree.size() == 0)
return 0;
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (transformer == null) {
if (entry.getValue().equals(value))
keySetToRemove.add(entry.getKey());
} else {
Collection<OIdentifiable> identifiables = transformer.transformFromValue(entry.getValue());
for (OIdentifiable identifiable : identifiables) {
if (identifiable.equals(value))
keySetToRemove.add(entry.getKey());
}
}
return true;
}
});
for (Object keyToRemove : keySetToRemove)
sbTree.remove(keyToRemove);
return keySetToRemove.size();
} finally {
releaseExclusiveLock();
}
}
@Override
public Collection<OIdentifiable> getValuesBetween(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive,
final int maxValuesToFetch, final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMajor(Object fromKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<OIdentifiable> getValuesMinor(Object toKey, boolean isInclusive, final int maxValuesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<OIdentifiable> result = new HashSet<OIdentifiable>();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
addToResult(transformer, result, entry.getValue(), maxValuesToFetch);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMajor(Object fromKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMajor(fromKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesMinor(Object toKey, boolean isInclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesMinor(toKey, isInclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public Collection<ODocument> getEntriesBetween(Object rangeFrom, Object rangeTo, boolean inclusive, final int maxEntriesToFetch,
final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final Set<ODocument> result = new ODocumentFieldsHashSet();
sbTree.loadEntriesBetween(rangeFrom, inclusive, rangeTo, inclusive, new OSBTree.RangeResultListener<Object, V>() {
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
final Object key = entry.getKey();
final V value = entry.getValue();
addToEntriesResult(transformer, result, key, value, maxEntriesToFetch);
if (maxEntriesToFetch > -1 && result.size() == maxEntriesToFetch)
return false;
return true;
}
});
return result;
} finally {
releaseSharedLock();
}
}
@Override
public long size(final ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
if (transformer == null)
return sbTree.size();
else {
final ItemsCounter<V> counter = new ItemsCounter<V>(transformer, -1);
final Object firstKey = sbTree.firstKey();
final Object lastKey = sbTree.lastKey();
if (firstKey != null && lastKey != null) {
sbTree.loadEntriesBetween(firstKey, true, lastKey, true, counter);
return counter.count;
}
return 0;
}
} finally {
releaseSharedLock();
}
}
@Override
public long count(Object rangeFrom, boolean fromInclusive, Object rangeTo, boolean toInclusive, int maxValuesToFetch,
ValuesTransformer<V> transformer) {
acquireSharedLock();
try {
final ItemsCounter<V> itemsCounter = new ItemsCounter<V>(transformer, maxValuesToFetch);
if (rangeTo != null)
sbTree.loadEntriesBetween(rangeFrom, fromInclusive, rangeTo, toInclusive, itemsCounter);
else
sbTree.loadEntriesMajor(rangeFrom, fromInclusive, itemsCounter);
return itemsCounter.count;
} finally {
releaseSharedLock();
}
}
@Override
public boolean hasRangeQuerySupport() {
return true;
}
private ODatabaseRecord getDatabase() {
return ODatabaseRecordThreadLocal.INSTANCE.get();
}
private void addToResult(ValuesTransformer<V> transformer, Set<OIdentifiable> result, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
result.add(transformedValue);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else
result.add((OIdentifiable) value);
}
private void addToEntriesResult(ValuesTransformer<V> transformer, Set<ODocument> result, Object key, V value, int maxValuesToFetch) {
if (transformer != null) {
Collection<OIdentifiable> transformResult = transformer.transformFromValue(value);
for (OIdentifiable transformedValue : transformResult) {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", transformedValue.getIdentity());
document.unsetDirty();
result.add(document);
if (maxValuesToFetch > -1 && result.size() == maxValuesToFetch)
return;
}
} else {
final ODocument document = new ODocument();
document.field("key", key);
document.field("rid", ((OIdentifiable) value).getIdentity());
document.unsetDirty();
result.add(document);
}
}
<<<<<<< MINE
private static final class MapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private final OSBTree<Object, V> sbTree;
private Object firstKey;
MapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
firstKey = sbTree.firstKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMajor(firstKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
firstKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
private static final class InverseMapEntryIterator<V> implements Iterator<Map.Entry<Object, V>> {
private final OSBTree<Object, V> sbTree;
private LinkedList<Map.Entry<Object, V>> preFetchedValues;
private Object lastKey;
InverseMapEntryIterator(OSBTree<Object, V> sbTree) {
this.sbTree = sbTree;
if (sbTree.size() == 0) {
this.preFetchedValues = null;
return;
}
this.preFetchedValues = new LinkedList<Map.Entry<Object, V>>();
lastKey = sbTree.lastKey();
prefetchData(true);
}
private void prefetchData(boolean firstTime) {
sbTree.loadEntriesMinor(lastKey, firstTime, new OSBTree.RangeResultListener<Object, V>() {
@Override
public boolean addResult(final Map.Entry<Object, V> entry) {
preFetchedValues.add(entry);
return preFetchedValues.size() <= 8000;
}
});
if (preFetchedValues.isEmpty())
preFetchedValues = null;
else
lastKey = preFetchedValues.getLast().getKey();
}
@Override
public boolean hasNext() {
return preFetchedValues != null;
}
@Override
public Map.Entry<Object, V> next() {
final Map.Entry<Object, V> entry = preFetchedValues.removeFirst();
if (preFetchedValues.isEmpty())
prefetchData(false);
return entry;
}
@Override
public void remove() {
throw new UnsupportedOperationException("remove");
}
}
private static final class ItemsCounter<V> implements OSBTree.RangeResultListener<Object, V> {
=======
private static final class ItemsCounter<V> implements OTree.RangeResultListener<Object, V> {
>>>>>>> YOURS
private final ValuesTransformer<V> valuesTransformer;
private final int maxValuesToFetch;
private ItemsCounter(ValuesTransformer<V> valuesTransformer, int maxValuesToFetch) {
this.valuesTransformer = valuesTransformer;
this.maxValuesToFetch = maxValuesToFetch;
}
private int count;
@Override
<<<<<<< MINE
public boolean addResult(Map.Entry<Object, V> entry) {
=======
public boolean addResult(OTree.BucketEntry<Object, V> entry) {
>>>>>>> YOURS
if (valuesTransformer != null)
count += valuesTransformer.transformFromValue(entry.getValue()).size();
else
count++;
if (maxValuesToFetch > 0 && count >= maxValuesToFetch)
return false;
return true;
}
}
}
Diff Result
No diff
Case 21 - realmjava.rev_528df_698a7.Table.java
TableDefinition getSubTableDefinition
Left added a method annotation
Right modified signature and body
Unstructured reported conflict between signatures
Safe reported conflict between signatures
MergeMethods merged signature and new body
Note: Left added annotation was lost
KeepBothMethods kept both versions
Base
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableDefinition {
public static final long INFINITE = -1;
protected long nativePtr;
protected boolean immutable = false;
// test:
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
TightDB.loadLibrary();
}
/**
* Construct a Table base object. It can be used to register columns in this
* table. Registering into table is allowed only for empty tables. It
* creates a native reference of the object and keeps a reference to it.
*/
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
* Check if the Table is valid.
* Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
* You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
* The only method you can call is 'isValid()'.
*/
public boolean isValid(){
if (nativePtr == 0)
return false;
return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
private void verifyColumnName(String name) {
if (name.length() > 63) {
throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
}
}
public TableDefinition getSubTableDefinition(long columnIndex) {
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableDefinition(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
* Remove a column in the table dynamically.
*/
public void removeColumn(long columnIndex)
{
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
* Rename a column in the table.
*/
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
* Updates a table specification from a Table specification structure.
* Supported types - refer to @see ColumnType.
*
* @param columnType
* data type of the column @see <code>ColumnType</code>
* @param columnName
* name of the column. Duplicate column name is not allowed.
*/
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.

/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
public long size() {
    return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);

/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
public boolean isEmpty() {
    return size() == 0;
}

/**
 * Clears the table i.e., deleting all rows in the table.
 *
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void clear() {
    if (immutable) throwImmutable();
    nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);

// Column Information.

/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
public long getColumnCount() {
    return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);

/** Returns the native-produced specification of this table's columns. */
public TableSpec getTableSpec(){
    return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);

/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
public String getColumnName(long columnIndex) {
    return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of the column with the given name.
 *
 * @param name column name to look up
 * @return the column index, or -1 if no column has that name
 */
public long getColumnIndex(String name) {
    final long totalColumns = getColumnCount();
    long candidate = 0;
    while (candidate < totalColumns) {
        if (name.equals(getColumnName(candidate))) {
            return candidate;
        }
        candidate++;
    }
    return -1;
}
/**
 * Get the type of a column identified by the columnIndex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
public ColumnType getColumnType(long columnIndex)
{
    // Native returns an int code; map it to the Java enum.
    return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void remove(long rowIndex) {
    if (immutable) throwImmutable();
    nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);

/** Removes the last row of the table. */
public void removeLast() {
    if (immutable) throwImmutable();
    nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);

/**
 * EXPERIMENTAL function
 * NOTE(review): exact semantics ("move last row over rowIndex") are defined by
 * the native layer — confirm there before relying on ordering guarantees.
 */
public void moveLastOver(long rowIndex) {
    if (immutable) throwImmutable();
    nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);

// Row Handling methods.

/** Appends one empty row; returns the result of the native call. */
public long addEmptyRow() {
    if (immutable) throwImmutable();
    return nativeAddEmptyRow(nativePtr, 1);
}

/** Appends 'rows' empty rows; returns the result of the native call. */
public long addEmptyRows(long rows) {
    if (immutable) throwImmutable();
    if (rows < 1)
        throw new IllegalArgumentException("'rows' must be > 0.");
    return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Appends a row built from the given values (one per column) and returns the
 * index of the newly added row.
 */
public long add(Object... values) {
    final long newRowIndex = size();
    insert(newRowIndex, values);
    return newRowIndex;
}
/**
 * Inserts a new row at rowIndex with the given values (one value per column,
 * in column order). All values are type-checked against the column types
 * before any native insertion happens, so a bad argument leaves the table
 * untouched.
 *
 * @param rowIndex 0-based position of the new row; must be <= size()
 * @param values   one value per column; a subtable column takes an Object[][]
 *                 (rows of values) or null for an empty subtable
 * @throws IllegalStateException    if the table is immutable (read transaction)
 * @throws IllegalArgumentException if rowIndex is out of range, the number of
 *                                  values does not match the column count, or a
 *                                  value is incompatible with its column type
 */
public void insert(long rowIndex, Object... values) {
    if (immutable) throwImmutable();
    // Check index
    long size = size();
    if (rowIndex > size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
                " must be <= table.size() " + String.valueOf(size) + ".");
    }
    // Check values types
    int columns = (int)getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" +
                String.valueOf(values.length) +
                ") does not match the number of columns in the table (" +
                String.valueOf(columns) + ").");
    }
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // BUG FIX: value may be null when it fails matchObject; calling
            // value.getClass() here threw NullPointerException instead of the
            // intended IllegalArgumentException. Report "null" explicitly.
            String got = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
                    ". Expected a value compatible with column type " + colType + ", but got " + got + ".");
        }
    }
    // Insert values
    for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[(int)columnIndex];
        switch (colTypes[(int)columnIndex]) {
        case ColumnTypeBool:
            nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
            break;
        case ColumnTypeInt:
            nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
            break;
        case ColumnTypeFloat:
            nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
            break;
        case ColumnTypeDouble:
            nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
            break;
        case ColumnTypeString:
            nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
            break;
        case ColumnTypeDate:
            // Native layer stores seconds; Date.getTime() is milliseconds.
            nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
            break;
        case ColumnTypeMixed:
            nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
            break;
        case ColumnTypeBinary:
            // NOTE(review): a binary value that is neither byte[] nor ByteBuffer is
            // silently skipped, which would desynchronize this row's insert —
            // confirm matchObject() rules that case out.
            if (value instanceof byte[])
                nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
            else if (value instanceof ByteBuffer)
                nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
            break;
        case ColumnTypeTable:
            nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
            insertSubtableValues(rowIndex, columnIndex, value);
            break;
        default:
            throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
        }
    }
    insertDone();
}
/**
 * Recursively inserts the rows of a nested Object[][] value into the subtable
 * cell at (columnIndex, rowIndex). A null value means "leave the subtable empty".
 */
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
    if (value == null) {
        return;
    }
    // Fetch the subtable via the during-insert variant (row count not yet committed).
    final Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
    final Object[] subRows = (Object[]) value;
    for (int r = 0; r < subRows.length; r++) {
        // Each element is itself one row of values for the subtable.
        subtable.insert(r, (Object[]) subRows[r]);
    }
}
/**
 * Replaces the row at rowIndex with the given values (one value per column).
 * All values are type-checked first; only then is the old row removed and the
 * new one inserted in its place, so a bad argument leaves the table untouched.
 *
 * @param rowIndex 0-based index of the row to replace; must be < size()
 * @param values   one value per column
 * @throws IllegalStateException    if the table is immutable (read transaction)
 * @throws IllegalArgumentException on a bad index, wrong value count, or a
 *                                  type-incompatible value
 */
public void set(long rowIndex, Object... values) {
    if (immutable) throwImmutable();
    // Check index
    long size = size();
    if (rowIndex >= size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
                " must be < table.size() " + String.valueOf(size) + ".");
    }
    // Verify number of 'values'
    int columns = (int)getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" +
                String.valueOf(values.length) +
                ") does not match the number of columns in the table (" +
                String.valueOf(columns) + ").");
    }
    // Verify type of 'values'. (The colTypes[] array kept here previously was
    // written but never read — removed as an unused local.)
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        if (!colType.matchObject(value)) {
            // BUG FIX: value may be null when it fails matchObject; calling
            // value.getClass() here threw NullPointerException instead of the
            // intended IllegalArgumentException. Report "null" explicitly.
            String got = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
                    ". Expected a value compatible with column type " + colType + ", but got " + got + ".");
        }
    }
    // Now that all values are verified, we can remove the row and insert it again.
    // TODO: Can be optimized to only set the values (but clear any subtables)
    remove(rowIndex);
    insert(rowIndex, values);
}
// Typed single-cell insert helpers. Note: the native declarations below are
// interleaved with unrelated wrappers (e.g. nativeInsertFloat next to
// insertLong) — declaration order has no effect on behavior.

/** Inserts a long into an integer cell. */
public void insertLong(long columnIndex, long rowIndex, long value) {
    if (immutable) throwImmutable();
    nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);

/** Inserts a float into a float cell. */
public void insertFloat(long columnIndex, long rowIndex, float value) {
    if (immutable) throwImmutable();
    nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);

/** Inserts a double into a double cell. */
public void insertDouble(long columnIndex, long rowIndex, double value) {
    if (immutable) throwImmutable();
    nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);

/** Inserts a boolean into a boolean cell. */
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
    if (immutable) throwImmutable();
    nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);

/** Inserts a date; the native layer stores seconds, so sub-second precision is truncated. */
public void insertDate(long columnIndex, long rowIndex, Date date) {
    if (immutable) throwImmutable();
    nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);

/** Inserts a String into a string cell. */
public void insertString(long columnIndex, long rowIndex, String value) {
    if (immutable) throwImmutable();
    nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);

/** Inserts a Mixed value into a mixed-typed cell. */
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
    if (immutable) throwImmutable();
    nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
/**
 * Inserts a binary value from a direct ByteBuffer.
 * (Removed commented-out System.err debug prints — dead code.)
 *
 * @param data must be allocated with ByteBuffer.allocateDirect(); heap buffers
 *             are currently rejected
 * @throws IllegalStateException if the table is immutable (read transaction)
 * @throws NullPointerException  if data is null
 * @throws RuntimeException      if data is not a direct buffer
 */
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    if (data.isDirect())
        nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
/** Inserts a binary value from a byte array. */
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
    if (data == null)
        throw new NullPointerException("Null Array");
    if (immutable) throwImmutable();
    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);

/** Inserts a subtable cell and fills it from a nested values array (may be null). */
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
    if (immutable) throwImmutable();
    nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
    insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);

/** Commits a row built with the insert* helpers; must follow one value per column. */
public void insertDone() {
    if (immutable) throwImmutable();
    nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//

/** Value of an integer cell. */
public long getLong(long columnIndex, long rowIndex) {
    return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);

/** Value of a boolean cell. */
public boolean getBoolean(long columnIndex, long rowIndex) {
    return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);

/** Value of a float cell. */
public float getFloat(long columnIndex, long rowIndex) {
    return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);

/** Value of a double cell. */
public double getDouble(long columnIndex, long rowIndex) {
    return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);

/** Value of a date cell; native stores seconds, converted to a millisecond-based Date. */
public Date getDate(long columnIndex, long rowIndex) {
    return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);

/**
 * Get the value of a (string) cell.
 *
 * @param columnIndex 0 based index value of the column
 * @param rowIndex    0 based index of the row.
 * @return value of the particular cell
 */
public String getString(long columnIndex, long rowIndex) {
    return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);

/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex 0 based index value of the cell column
 * @param rowIndex    0 based index value of the cell row
 * @return value of the particular cell.
 */
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
    return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);

/** Value of a binary cell as a byte array. */
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
    return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);

/** Value of a mixed-typed cell. */
public Mixed getMixed(long columnIndex, long rowIndex) {
    return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}

/** Runtime type of the value stored in a mixed-typed cell. */
public ColumnType getMixedType(long columnIndex, long rowIndex)
{
    return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Note: The subtable returned will have to be closed again after use.
 * You can let Java's garbage collector handle that or, better yet, call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return the subtable at the requested cell
 */
public Table getSubTable(long columnIndex, long rowIndex) {
    return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);

// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
    return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);

/** Number of rows of the subtable at the given cell. */
public long getSubTableSize(long columnIndex, long rowIndex) {
    return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);

/** Removes all rows of the subtable at the given cell. */
public void clearSubTable(long columnIndex, long rowIndex) {
    if (immutable) throwImmutable();
    nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//

/** Sets an integer cell. */
public void setLong(long columnIndex, long rowIndex, long value) {
    if (immutable) throwImmutable();
    nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);

/** Sets a boolean cell. */
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
    if (immutable) throwImmutable();
    nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);

/** Sets a float cell. */
public void setFloat(long columnIndex, long rowIndex, float value) {
    if (immutable) throwImmutable();
    nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);

/** Sets a double cell. */
public void setDouble(long columnIndex, long rowIndex, double value) {
    if (immutable) throwImmutable();
    nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);

/** Sets a date cell; native stores seconds, so sub-second precision is truncated. */
public void setDate(long columnIndex, long rowIndex, Date date) {
    if (immutable) throwImmutable();
    nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);

/** Sets a string cell. */
public void setString(long columnIndex, long rowIndex, String value) {
    if (immutable) throwImmutable();
    nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);

/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 *            the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 */
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("Null array");
    if (data.isDirect())
        nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);

/** Sets a binary cell from a byte array. */
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("Null Array");
    nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);

/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 *            the value to store; must not be null
 */
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException();
    nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
 * Adds the value to all cells in the column.
 *
 * @param columnIndex column index of the cell
 * @param value amount added to every cell of the column
 */
//!!!TODO: New. Support in highlevel API
public void addLong(long columnIndex, long value) {
    if (immutable) throwImmutable();
    nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);

/** Creates a search index on a string column; other column types are rejected. */
public void setIndex(long columnIndex) {
    if (immutable) throwImmutable();
    if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
        throw new IllegalArgumentException("Index is only supported on string columns.");
    nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);

/** Whether the given column has a search index (see setIndex). */
public boolean hasIndex(long columnIndex) {
    return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Fixed the misspelled native parameter name "columnnIndex" in the minimum
// declarations; JNI binds by class + method name + descriptor, so a Java-side
// parameter rename is binary-compatible with the native implementations.

// Integers

/** Sum of all values in an integer column. */
public long sum(long columnIndex) {
    return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);

/** Maximum of an integer column. */
public long maximum(long columnIndex) {
    return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);

/** Minimum of an integer column. */
public long minimum(long columnIndex) {
    return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnIndex);

/** Average of an integer column. */
public double average(long columnIndex) {
    return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);

// Floats

/** Sum of a float column (accumulated as double by the native layer's signature). */
public double sumFloat(long columnIndex) {
    return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);

/** Maximum of a float column. */
public float maximumFloat(long columnIndex) {
    return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);

/** Minimum of a float column. */
public float minimumFloat(long columnIndex) {
    return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnIndex);

/** Average of a float column. */
public double averageFloat(long columnIndex) {
    return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);

// Doubles

/** Sum of a double column. */
public double sumDouble(long columnIndex) {
    return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);

/** Maximum of a double column. */
public double maximumDouble(long columnIndex) {
    return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);

/** Minimum of a double column. */
public double minimumDouble(long columnIndex) {
    return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnIndex);

/** Average of a double column. */
public double averageDouble(long columnIndex) {
    return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);

//
// Count
//

/** Number of cells in the column equal to the given long value. */
public long count(long columnIndex, long value) {
    return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);

/** Number of cells in the column equal to the given float value. */
public long count(long columnIndex, float value) {
    return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);

/** Number of cells in the column equal to the given double value. */
public long count(long columnIndex, double value) {
    return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);

/** Number of cells in the column equal to the given String value. */
public long count(long columnIndex, String value) {
    return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//

/** Starts a new query on this table. */
public TableQuery where() {
    return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);

/** Row index of the first integer cell equal to value ("not found" result is native-defined). */
public long findFirstLong(long columnIndex, long value) {
    return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);

/** Row index of the first boolean cell equal to value. */
public long findFirstBoolean(long columnIndex, boolean value) {
    return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);

/** Row index of the first float cell equal to value. */
public long findFirstFloat(long columnIndex, float value) {
    return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);

/** Row index of the first double cell equal to value. */
public long findFirstDouble(long columnIndex, double value) {
    return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);

/** Row index of the first date cell equal to date (compared at second precision). */
public long findFirstDate(long columnIndex, Date date) {
    return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);

/** Row index of the first string cell equal to value. */
public long findFirstString(long columnIndex, String value) {
    return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);

/** View of all rows whose integer cell equals value. */
public TableView findAllLong(long columnIndex, long value) {
    return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);

/** View of all rows whose boolean cell equals value. */
public TableView findAllBoolean(long columnIndex, boolean value) {
    return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);

/** View of all rows whose float cell equals value. */
public TableView findAllFloat(long columnIndex, float value) {
    return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);

/** View of all rows whose double cell equals value. */
public TableView findAllDouble(long columnIndex, double value) {
    return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);

/** View of all rows whose date cell equals date (compared at second precision). */
public TableView findAllDate(long columnIndex, Date date) {
    return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);

/** View of all rows whose string cell equals value. */
public TableView findAllString(long columnIndex, String value) {
    return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);

// Requires that the first column is a string column with index
public long lookup(String value) {
    if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
        throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
    return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);

// Experimental feature
// NOTE(review): presumably binary-search-style bounds on a sorted integer
// column — confirm exact semantics in the native layer.
public long lowerBoundLong(long columnIndex, long value) {
    return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
public long upperBoundLong(long columnIndex, long value) {
    return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
/** View of distinct rows for the given column (exact semantics are native-defined). */
public TableView distinct(long columnIndex) {
    return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);

// Optimize
/** Asks the native core to optimize the table's internal storage. */
public void optimize() {
    if (immutable) throwImmutable();
    nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
/** JSON representation of the whole table, produced by the native layer. */
public String toJson() {
    return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);

/** String representation of the whole table (all rows). */
@Override  // added: overrides Object.toString()
public String toString() {
    return nativeToString(nativePtr, INFINITE);
}

/** String representation limited to maxRows rows (pass INFINITE for all). */
public String toString(long maxRows) {
    return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);

/** String representation of a single row. */
public String rowToString(long rowIndex) {
    return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
/** Uniform failure for mutating calls made while the table is immutable (read transaction). */
private void throwImmutable() {
    throw new IllegalStateException("Mutable method call during read transaction.");
}
}
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of their choice will automatically inherit
* from this class via the tightdb class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableDefinition {
/** Sentinel passed to toString(maxRows) to print all rows. */
public static final long INFINITE = -1;

/** Pointer to the native (C++) table; 0 after close(). */
protected long nativePtr;
/** True while the table belongs to a read transaction; mutating methods then throw. */
protected boolean immutable = false;

// test: debug bookkeeping — sequential table number and live-instance counter.
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;

static {
    // Ensure the native library is loaded before any native method is called.
    TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 *
 * @throws OutOfMemoryError if the native table could not be allocated
 */
public Table() {
    // Native methods work will be initialized here. Generated classes will
    // have nothing to do with the native functions. Generated Java Table
    // classes will work as a wrapper on top of table.
    nativePtr = createNative();
    if (nativePtr == 0)
        throw new OutOfMemoryError("Out of native memory.");
    if (DEBUG) {
        tableNo = ++TableCount;
        System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
    }
}
protected native long createNative();
// Wraps an existing native table pointer (e.g. a subtable or transaction table).
// NOTE(review): 'parent' is accepted but never stored; if it is meant to keep the
// owning object reachable (preventing premature GC/close), this does not achieve
// that — confirm against callers.
protected Table(Object parent, long nativePtr, boolean immutable) {
    this.immutable = immutable;
    this.nativePtr = nativePtr;
    if (DEBUG) {
        tableNo = ++TableCount;
        System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
    }
}
@Override
public void finalize() throws Throwable {
    // Safety net: releases the native object if close() was never reached.
    if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
    try {
        close();
    } finally {
        super.finalize();
    }
}

// Releases the native table exactly once; later calls are no-ops (nativePtr == 0).
// Guarded by a global mutex so finalizer and explicit close do not race.
private void close() {
    synchronized (CloseMutex.getInstance()) {
        if (nativePtr == 0) {
            if (DEBUG)
                System.err.println(".... CLOSE ignored.");
            return;
        }
        if (DEBUG) {
            TableCount--;
            System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
        }
        nativeClose(nativePtr);
        nativePtr = 0;
    }
}
protected native void nativeClose(long nativeTablePtr);

/*
 * Check if the Table is valid.
 * Whenever a Table/subtable is changed/updated all its subtables are invalidated.
 * You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
 * The only method you can call is 'isValid()'.
 */
public boolean isValid(){
    // A closed table (nativePtr == 0) is never valid; otherwise ask the native layer.
    if (nativePtr == 0)
        return false;
    return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
@Override
public boolean equals(Object other) {
    if (this == other)
        return true;
    if (other == null)
        return false;
    // Has to work for all the typed tables as well
    if (!(other instanceof Table))
        return false;
    Table otherTable = (Table) other;
    // Content comparison is delegated to the native layer.
    // NOTE(review): equals() is overridden without a matching hashCode() in view,
    // so equal tables may hash differently — confirm/add a consistent hashCode().
    return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
// Rejects column names longer than the limit imposed by the native core.
private void verifyColumnName(String name) {
    final int maxNameLength = 63;
    if (name.length() <= maxNameLength) {
        return;
    }
    throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
}
/**
 * Returns a definition handle for the subtable column at columnIndex.
 * Only valid when called on a root table.
 *
 * @param columnIndex 0-based index of the subtable column
 * @throws UnsupportedOperationException if this table is itself a subtable
 */
public TableDefinition getSubTableDefinition(long columnIndex) {
    // Idiom fix: was "nativeIsRootTable(nativePtr) == false".
    if (!nativeIsRootTable(nativePtr))
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    // Path from the root to the subtable column: here just the single column index.
    long[] newPath = new long[1];
    newPath[0] = columnIndex;
    return new SubTableDefinition(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
 * Add a column to the table dynamically.
 * @return Index of the new column.
 */
// NOTE(review): no immutable check here or in removeColumn, unlike
// updateFromSpec()/clear() — confirm schema changes are intended to bypass it.
public long addColumn (ColumnType type, String name) {
    verifyColumnName(name);
    return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);

/**
 * Remove a column in the table dynamically.
 */
public void removeColumn(long columnIndex)
{
    nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table.
 *
 * @param columnIndex 0-based index of the column to rename
 * @param newName     new column name (max 63 characters, enforced by verifyColumnName)
 */
public void renameColumn(long columnIndex, String newName) {
    verifyColumnName(newName);
    nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);

/**
 * Updates this table's columns from a TableSpec specification structure.
 * Supported column types: see {@link ColumnType}.
 *
 * @param tableSpec specification describing the columns of this table
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void updateFromSpec(TableSpec tableSpec) {
    if (immutable) throwImmutable();
    nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
* Get the number of entries/rows of this table.
*
* @return The number of rows.
*/
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
* Checks whether this table is empty or not.
*
* @return true if empty, otherwise false.
*/
public boolean isEmpty() {
return size() == 0;
}
/**
* Clears the table i.e., deleting all rows in the table.
*/
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
/** @return the table's current column specification, read from native state. */
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of a column based on the name.
 * Linear scan; each probe is a JNI call via getColumnName.
 *
 * @param name column name
 * @return the index, -1 if not found
 */
public long getColumnIndex(String name) {
long columnCount = getColumnCount();
for (long i = 0; i < columnCount; i++) {
if (name.equals(getColumnName(i))) {
return i;
}
}
return -1;
}
/**
 * Get the type of a column identified by the columnIndex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
public ColumnType getColumnType(long columnIndex)
{
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 * @throws IllegalStateException if called during a read transaction
 */
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
/** Removes the last row of the table. */
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
 * EXPERIMENTAL function.
 * Replaces the row at rowIndex with the last row, then drops the last row
 * (native semantics — avoids shifting the rows in between).
 */
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
/** Appends one empty row. @return index of the new row. */
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
/**
 * Appends {@code rows} empty rows.
 *
 * @param rows number of rows to append; must be >= 1
 * @return index of the first newly added row (native return value)
 * @throws IllegalArgumentException if rows < 1
 * @throws IllegalStateException if called during a read transaction
 */
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Appends a row holding the given values and reports where it landed.
 *
 * @param values one value per column, in column order
 * @return the row index of the newly appended row
 */
public long add(Object... values) {
    final long appendedAt = size(); // new row goes after the current last row
    insert(appendedAt, values);
    return appendedAt;
}
/**
 * Inserts a row at {@code rowIndex}; rows at and after that index are shifted down.
 * Builds the row column-by-column through the low-level native insert calls and
 * finishes with insertDone().
 *
 * @param rowIndex zero-based position of the new row; must be &lt;= size()
 * @param values one value per column, in column order; each value must be
 *               compatible with the column's type (checked via ColumnType.matchObject)
 * @throws IllegalStateException if called during a read transaction
 * @throws IllegalArgumentException if the index, the value count, or a value type is invalid
 */
public void insert(long rowIndex, Object... values) {
    if (immutable) throwImmutable();
    // Check index
    long size = size();
    if (rowIndex > size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
                " must be <= table.size() " + String.valueOf(size) + ".");
    }
    // Check values types
    int columns = (int)getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" +
                String.valueOf(values.length) +
                ") does not match the number of columns in the table (" +
                String.valueOf(columns) + ").");
    }
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // BUGFIX: a null value that fails the type check used to trigger a
            // NullPointerException from value.getClass() while building this
            // message; report "null" so the intended exception is thrown instead.
            String actualType = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
                    ". Expected a value compatible with column type " + colType + ", but got " + actualType + ".");
        }
    }
    // Insert values — one native call per column; row becomes visible after insertDone().
    for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[(int)columnIndex];
        switch (colTypes[(int)columnIndex]) {
        case ColumnTypeBool:
            nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
            break;
        case ColumnTypeInt:
            nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
            break;
        case ColumnTypeFloat:
            nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
            break;
        case ColumnTypeDouble:
            nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
            break;
        case ColumnTypeString:
            nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
            break;
        case ColumnTypeDate:
            // Native layer stores dates as whole seconds since the epoch.
            nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
            break;
        case ColumnTypeMixed:
            nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
            break;
        case ColumnTypeBinary:
            // Two accepted representations of binary data.
            if (value instanceof byte[])
                nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
            else if (value instanceof ByteBuffer)
                nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
            break;
        case ColumnTypeTable:
            nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
            insertSubtableValues(rowIndex, columnIndex, value);
            break;
        default:
            throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
        }
    }
    insertDone();
}
/**
 * Recursively copies nested row data into a freshly inserted subtable cell.
 * A null value leaves the subtable empty.
 *
 * @param rowIndex row of the subtable cell
 * @param columnIndex column of the subtable cell
 * @param value an Object[][] of row data for the subtable, or null
 */
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
    if (value == null) {
        return; // nothing to copy; subtable stays empty
    }
    Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
    Object[] subRows = (Object[]) value;
    int rowNo = 0;
    for (Object subRow : subRows) {
        // Each element is itself a row (Object[]); insert recursively.
        subtable.insert(rowNo++, (Object[]) subRow);
    }
}
/**
 * Replaces the row at {@code rowIndex} with the given values.
 * Implemented as remove + insert after all values have been validated,
 * so a validation failure leaves the table untouched.
 *
 * @param rowIndex zero-based index of the row to replace; must be &lt; size()
 * @param values one value per column, in column order
 * @throws IllegalStateException if called during a read transaction
 * @throws IllegalArgumentException if the index, the value count, or a value type is invalid
 */
public void set(long rowIndex, Object... values) {
    if (immutable) throwImmutable();
    // Check index
    long size = size();
    if (rowIndex >= size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
                " must be < table.size() " + String.valueOf(size) + ".");
    }
    // Verify number of 'values'
    int columns = (int)getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" +
                String.valueOf(values.length) +
                ") does not match the number of columns in the table (" +
                String.valueOf(columns) + ").");
    }
    // Verify type of 'values'
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // BUGFIX: a null value that fails the type check used to trigger a
            // NullPointerException from value.getClass() while building this
            // message; report "null" so the intended exception is thrown instead.
            String actualType = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
                    ". Expected a value compatible with column type " + colType + ", but got " + actualType + ".");
        }
    }
    // Now that all values are verified, we can remove the row and insert it again.
    // TODO: Can be optimized to only set the values (but clear any subtables)
    remove(rowIndex);
    insert(rowIndex, values);
}
// Low-level, per-cell row-building calls. A row built through these is not
// visible until insertDone() is called; insert(rowIndex, values...) is the
// high-level wrapper that sequences them correctly.
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
// Dates are stored natively as whole seconds since the epoch.
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
/**
 * Inserts binary data from a ByteBuffer; only direct buffers are supported.
 *
 * @throws NullPointerException if data is null
 * @throws RuntimeException if the buffer is not allocateDirect()
 */
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
//System.err.printf("\ninsertBinary(col %d, row %d, ByteBuffer)\n", columnIndex, rowIndex);
//System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
// NOTE(review): this overload checks null before 'immutable', the ByteBuffer
// overload does it the other way round — confirm whether the order matters.
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable) throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/** Inserts a subtable cell and fills it with the given nested row data (may be null). */
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
/** Commits the row currently being built via the insertXxx calls. */
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters — each reads one cell (columnIndex, rowIndex) via a native call.
//
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
// Native stores seconds since the epoch; Date expects milliseconds.
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (string) cell.
 *
 * @param columnIndex
 *            0 based index value of the column
 * @param rowIndex
 *            0 based index of the row.
 * @return value of the particular cell
 */
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex
 *            0 based index value of the cell column
 * @param rowIndex
 *            0 based index value of the cell row
 * @return value of the particular cell.
 */
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
/** @return the runtime type of the value stored in a Mixed cell. */
public ColumnType getMixedType(long columnIndex, long rowIndex)
{
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Returns the subtable stored in the given cell.
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let javas garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
/** @return the number of rows in the subtable at the given cell. */
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
/** Removes all rows from the subtable at the given cell. */
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters — each overwrites one existing cell; all reject calls made while immutable.
//
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
// Dates are stored natively as whole seconds since the epoch.
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 *            the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 * @throws NullPointerException if data is null
 * @throws RuntimeException if the buffer is not allocateDirect()
 */
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data the new value; must be non-null
 * @throws NullPointerException if data is null
 */
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
 * Add the value to all cells in the column.
 *
 * @param columnIndex column index of the cell
 * @param value amount to add to every cell in the column
 */
//!!!TODO: New. Support in highlevel API
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
/**
 * Creates a search index on the column; only supported for string columns.
 *
 * @throws IllegalArgumentException if the column is not a string column
 */
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
/** @return true if the column has a search index. */
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions — each runs over a whole column in native code.
// The int/float/double variants must be called on a column of the matching type.
//
// Integers
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count — number of cells in the column equal to the given value.
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
/** @return a new query object rooted at this table. */
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
// findFirstXxx: row index of the first match (native return value);
// findAllXxx: a TableView of all matching rows.
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
// Dates are compared natively as whole seconds since the epoch.
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
/**
 * Indexed lookup of a value in column 0.
 *
 * @throws RuntimeException if column 0 is not an indexed string column
 */
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
// Binary-search style bounds in a column (native implementation).
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
/** @return a view containing one row per distinct value in the column. */
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
/** Asks the native layer to optimize the table's internal storage. */
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
/** @return the whole table serialized to JSON by the native layer. */
public String toJson() {
    return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
/**
 * Human-readable dump of the entire table (no row limit).
 * FIX: added the missing @Override — this overrides Object.toString, and the
 * rest of the codebase annotates overrides consistently.
 */
@Override
public String toString() {
    return nativeToString(nativePtr, INFINITE);
}
/**
 * Human-readable dump limited to the first {@code maxRows} rows.
 *
 * @param maxRows maximum number of rows to render; INFINITE (-1) for all
 */
public String toString(long maxRows) {
    return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
/** @return a human-readable dump of a single row. */
public String rowToString(long rowIndex) {
    return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
/** Uniform failure path for mutating calls made while the table is immutable (read transaction). */
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
Left
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of their choice will automatically inherit
* from this class via the tightdb class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableDefinition {
// Sentinel meaning "no row limit" for native calls such as nativeToString.
public static final long INFINITE = -1;
// Handle to the underlying native (C++) table; set to 0 once close() has run.
protected long nativePtr;
// True when the table was obtained inside a read transaction; mutators then throw.
protected boolean immutable = false;
// test:
// Debug-only bookkeeping used to track live table instances (see DEBUG prints).
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
// Load the native library before any native method can be touched.
TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 *
 * @throws OutOfMemoryError if the native table could not be allocated
 */
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
/**
 * Wraps an already-existing native table pointer (e.g. a subtable handle).
 *
 * @param parent the owning object; NOTE(review): not stored by this constructor —
 *               presumably intended to keep the parent alive; confirm against callers.
 * @param nativePtr native table handle to adopt
 * @param immutable whether mutating calls should be rejected
 */
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
// Safety net: release the native table if the user never called close().
// NOTE(review): finalize() is deprecated in modern JDKs — consider Cleaner/
// PhantomReference when the supported Java version allows.
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
// Releases the native table. Idempotent: a second call sees nativePtr == 0 and
// returns. Serialized through the global CloseMutex so finalizer and explicit
// close() cannot race.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
 * Check if the Table is valid.
 * Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
 * You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
 * The only method you can call is 'isValid()'.
 */
public boolean isValid(){
// A closed table (nativePtr == 0) is never valid; otherwise ask native code.
if (nativePtr == 0)
return false;
return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
/**
 * Content-based equality: two Table instances are equal when the native layer
 * reports their underlying tables as equal. Works across typed subclasses,
 * since any instanceof Table is accepted.
 *
 * NOTE(review): equals is overridden without a matching hashCode override, so
 * equal tables may produce different hash codes — confirm and add hashCode.
 */
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Rejects column names longer than the native layer supports.
 *
 * @param name proposed column name (must be non-null)
 * @throws IllegalArgumentException if the name exceeds 63 characters
 */
private void verifyColumnName(String name) {
    if (name.length() > 63) {
        // Include the offending name and its length so the caller can see what failed;
        // the previous message gave no hint about which name was rejected.
        throw new IllegalArgumentException("Column name '" + name + "' is " + name.length()
                + " characters; column names are currently limited to max 63 characters.");
    }
}
/**
 * Returns a definition handle for the subtable column at columnIndex.
 * Only valid on a root table; subtables must go through their root.
 *
 * @throws UnsupportedOperationException if called on a subtable
 */
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
// Path from the root to the subtable column: a single step here.
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableDefinition(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
 * Add a column to the table dynamically.
 *
 * @param type data type of the new column, see {@code ColumnType}
 * @param name column name; limited to 63 characters (see verifyColumnName)
 * @return Index of the new column.
 */
@Override
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
 * Remove a column in the table dynamically.
 *
 * @param columnIndex zero-based index of the column to remove
 */
@Override
public void removeColumn(long columnIndex) {
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table.
 *
 * @param columnIndex zero-based index of the column to rename
 * @param newName new column name; limited to 63 characters
 */
@Override
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates the table's column layout from a TableSpec structure.
 * Supported types - refer to @see ColumnType.
 *
 * @param tableSpec specification describing the columns this table should have
 * @throws IllegalStateException if called during a read transaction
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
 * Clears the table i.e., deleting all rows in the table.
 *
 * @throws IllegalStateException if called during a read transaction
 */
@Override
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
/** @return the table's current column specification, read from native state. */
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Looks up the 0-based index of the column with the given name by scanning
 * the column names in order.
 *
 * @param name column name to search for
 * @return the column index, or -1 if no column has that name
 */
@Override
public long getColumnIndex(String name) {
    final long columnCount = getColumnCount();
    long index = 0;
    while (index < columnCount) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        ++index;
    }
    return -1;
}
/**
 * Gets the type of the column identified by columnIndex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
@Override
public ColumnType getColumnType(long columnIndex) {
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 * @throws IllegalStateException if the table is immutable
 */
@Override
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
/** Removes the last row of the table. */
@Override
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
 * EXPERIMENTAL function.
 * Presumably replaces the row at rowIndex with the last row (per the native
 * call's name) -- TODO confirm exact semantics against the native core.
 */
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
/**
 * Appends a single empty row to the table.
 *
 * @return value returned by the native row-insertion call
 * @throws IllegalStateException if the table is immutable
 */
public long addEmptyRow() {
    return addEmptyRows(1);
}
/**
 * Appends the given number of empty rows to the table.
 *
 * @param rows how many rows to append; must be positive
 * @return value returned by the native row-insertion call
 * @throws IllegalStateException    if the table is immutable
 * @throws IllegalArgumentException if {@code rows} is less than 1
 */
public long addEmptyRows(long rows) {
    if (immutable) throwImmutable();
    if (rows < 1) {
        throw new IllegalArgumentException("'rows' must be > 0.");
    }
    return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Appends a row holding the given values at the end of the table.
 *
 * @param values one value per column, in column order
 * @return index of the newly added row
 */
public long add(Object... values) {
    final long newRowIndex = size();
    insert(newRowIndex, values);
    return newRowIndex;
}
/**
 * Inserts a row with the given values at {@code rowIndex}.
 * All values are validated against the table's column types before any
 * native insert call is made, so a failed validation leaves the table
 * unchanged.
 *
 * @param rowIndex index at which to insert; must be &lt;= size()
 * @param values   one value per column, in column order
 * @throws IllegalStateException    if the table is immutable
 * @throws IllegalArgumentException if the index is out of range, the value
 *                                  count differs from the column count, or a
 *                                  value does not match its column type
 */
public void insert(long rowIndex, Object... values) {
    if (immutable) throwImmutable();
    // Check index
    long size = size();
    if (rowIndex > size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
                " must be <= table.size() " + String.valueOf(size) + ".");
    }
    ColumnType colTypes[] = verifyRowValues(values);
    // Insert values
    for (long columnIndex = 0; columnIndex < colTypes.length; columnIndex++) {
        insertValue(colTypes[(int)columnIndex], columnIndex, rowIndex, values[(int)columnIndex]);
    }
    insertDone();
}
// Verifies that 'values' matches the table's column count and column types;
// returns the column types so the caller does not re-query them.
private ColumnType[] verifyRowValues(Object[] values) {
    int columns = (int)getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" +
                String.valueOf(values.length) +
                ") does not match the number of columns in the table (" +
                String.valueOf(columns) + ").");
    }
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // NOTE(review): value.getClass() NPEs if a null value fails matchObject -- confirm
            // matchObject never rejects null, or guard here.
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
                    ". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
        }
    }
    return colTypes;
}
// Dispatches one already-validated value to the native insert call matching its column type.
private void insertValue(ColumnType colType, long columnIndex, long rowIndex, Object value) {
    switch (colType) {
    case ColumnTypeBool:
        nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
        break;
    case ColumnTypeInt:
        nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
        break;
    case ColumnTypeFloat:
        nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
        break;
    case ColumnTypeDouble:
        nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
        break;
    case ColumnTypeString:
        nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
        break;
    case ColumnTypeDate:
        // Native layer stores seconds since the epoch, not milliseconds.
        nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
        break;
    case ColumnTypeMixed:
        nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
        break;
    case ColumnTypeBinary:
        if (value instanceof byte[])
            nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
        else if (value instanceof ByteBuffer)
            nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
        break;
    case ColumnTypeTable:
        nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
        insertSubtableValues(rowIndex, columnIndex, value);
        break;
    default:
        throw new RuntimeException("Unexpected columnType: " + String.valueOf(colType));
    }
}
// Fills the subtable cell at (columnIndex, rowIndex) from a nested Object[][]
// during an in-progress insert. A null value leaves the subtable empty.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
/**
 * Replaces the row at rowIndex with the given values.
 * All values are validated first, then the row is removed and re-inserted,
 * so a validation failure leaves the table unchanged.
 *
 * @param rowIndex index of the row to replace; must be &lt; size()
 * @param values   one value per column, in column order
 * @throws IllegalStateException    if the table is immutable
 * @throws IllegalArgumentException on bad index, value count, or value type
 */
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
// Low-level per-type insert wrappers. Each checks mutability, then delegates to
// the matching native call; callers must finish the row with insertDone().
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
// Dates are stored natively as whole seconds since the epoch; sub-second
// precision is dropped here.
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
// Binary insert via ByteBuffer: only direct buffers can be passed through JNI here.
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
// NOTE(review): this overload checks null before 'immutable', the reverse of
// the ByteBuffer overload above -- confirm the intended check order.
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable) throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Commits the row started by the insert* calls above.
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
// Cell accessors: each delegates to the native layer using (columnIndex, rowIndex).
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
// Native layer stores seconds since the epoch; convert back to milliseconds for java.util.Date.
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (string) cell.
 *
 * @param columnIndex
 * 0 based index value of the column
 * @param rowIndex
 * 0 based index of the row.
 * @return value of the particular cell
 */
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex
 * 0 based index value of the cell column
 * @param rowIndex
 * 0 based index value of the cell row
 * @return value of the particular cell.
 */
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
// Returns the runtime type of the value stored in a mixed-typed cell.
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let javas garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
// Returns the row count of the subtable stored in the given cell.
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
// Removes all rows from the subtable stored in the given cell.
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
// Cell mutators: each checks mutability, then delegates to the native layer.
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
// Stored natively as whole seconds since the epoch; sub-second precision is dropped.
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 * column index of the cell
 * @param rowIndex
 * row index of the cell
 * @param data
 * the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 */
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 * column index of the cell
 * @param rowIndex
 * row index of the cell
 * @param data non-null value to store
 */
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
 * Add the value for to all cells in the column.
 *
 * @param columnIndex column index of the cell
 * @param value amount added to every cell in the column
 */
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
// Creates a search index on a column; only string columns are supported.
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
// Reports whether the given column has a search index.
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Each aggregate delegates to a native call over a single column; the Integers /
// Floats / Doubles groups mirror each other per column type.
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
// Counts the rows whose cell in the given column equals 'value'.
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
// Creates a query builder rooted at this table.
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
// findFirst* return the row index of the first match; findAll* return a view of
// all matching rows. Per-type overloads mirror the column types.
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
// Dates are matched on whole seconds since the epoch.
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with a search index.
@Override
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
// Lower/upper bound lookups over an integer column -- presumably the column must
// be sorted for a meaningful result; TODO confirm against the native core.
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
// Returns a view containing one row per distinct value in the given column.
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
// Asks the native core to optimize the table's internal storage.
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
// Serializes the whole table to a JSON string via native code.
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
// INFINITE means no row limit on the textual dump.
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
// Renders a single row as text.
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Shared guard used by all mutators when the table is opened read-only.
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableDefinition {
// Sentinel row limit meaning "no limit" for toString(maxRows).
public static final long INFINITE = -1;
// Pointer to the underlying C++ table; 0 once close() has run.
protected long nativePtr;
// True when the table was opened inside a read transaction; mutators then throw.
protected boolean immutable = false;
// test:
// Debug-only bookkeeping for tracking live table instances.
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 */
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
// Wraps an existing native table pointer (e.g. a subtable); 'parent' keeps the
// owner reachable for the lifetime of this wrapper.
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
// Last-resort release of the native table if the user never called close().
// NOTE(review): relying on finalize() for native cleanup is fragile (no timing
// guarantee); prefer explicit close() at call sites.
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
// Releases the native table exactly once; serialized on a global mutex so
// concurrent finalizers/close calls cannot double-free. Idempotent: nativePtr
// is zeroed after the native close.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/**
 * Checks whether this table handle is still valid.
 * Whenever a table or subtable is changed/updated, all of its subtables are
 * invalidated; after that, any call other than {@code isValid()} throws.
 *
 * @return true if the underlying native table can still be used
 */
public boolean isValid(){
    // A closed table (nativePtr == 0) is never valid; otherwise ask native code.
    return nativePtr != 0 && nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
// Equality is delegated to native code, so two distinct wrappers can be equal.
// NOTE(review): equals() is overridden without hashCode(), breaking the
// Object contract for hash-based collections. A consistent hashCode would need
// native support (content hash), since hashing nativePtr would disagree with
// native equality -- flagging rather than fixing here.
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Rejects column names longer than the native core's 63-character limit.
 *
 * @param name candidate column name
 * @throws IllegalArgumentException if the name is too long
 */
private void verifyColumnName(String name) {
    final int maxLength = 63;
    if (name.length() > maxLength) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
/**
 * Returns a definition object for the subtable column at {@code columnIndex}.
 *
 * @param columnIndex index of the subtable column
 * @return definition rooted at this table's native pointer
 * @throws UnsupportedOperationException if this table is itself a subtable
 */
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
    if (!nativeIsRootTable(nativePtr)) {
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    }
    long[] path = new long[] { columnIndex };
    return new SubTableDefinition(nativePtr, path);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
 * Add a column to the table dynamically.
 *
 * @param type data type of the new column
 * @param name column name (limited to 63 characters by verifyColumnName)
 * @return Index of the new column.
 */
@Override
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
 * Remove a column in the table dynamically.
 *
 * @param columnIndex index of the column to remove
 */
@Override
public void removeColumn(long columnIndex) {
// NOTE(review): unlike clear()/remove()/set(), this mutator does not call
// throwImmutable() when 'immutable' is set — confirm whether schema
// changes during a read transaction are intentionally unguarded here.
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table.
 *
 * @param columnIndex index of the column to rename
 * @param newName new column name (limited to 63 characters)
 */
@Override
public void renameColumn(long columnIndex, String newName) {
// NOTE(review): also missing the throwImmutable() guard — see removeColumn.
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates the table's column layout from a TableSpec structure.
 * Supported types - refer to @see ColumnType.
 *
 * @param tableSpec
 *            specification describing the columns this table should have.
 * @throws IllegalStateException if the table is immutable (read transaction).
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
 * Clears the table, i.e. deletes all rows in the table.
 *
 * @throws IllegalStateException if the table is immutable (read transaction).
 */
@Override
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
// Returns the table's column layout (names/types) as a TableSpec.
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of the column with the given name, scanning the
 * columns linearly.
 *
 * @param name column name
 * @return the index, -1 if not found
 */
@Override
public long getColumnIndex(String name) {
    final long columns = getColumnCount();
    for (long index = 0; index < columns; ++index) {
        // name.equals(...) keeps the original NPE behavior for a null name.
        if (name.equals(getColumnName(index))) {
            return index;
        }
    }
    return -1;
}
/**
 * Get the type of a column identified by the columnIndex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
@Override
public ColumnType getColumnType(long columnIndex) {
// Native side reports an int tag; map it onto the ColumnType enum.
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 * @throws IllegalStateException if the table is immutable (read transaction).
 */
@Override
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
// Removes the last row of the table.
@Override
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
 * EXPERIMENTAL function.
 * Presumably moves the last row into position rowIndex instead of shifting
 * all following rows (does not preserve row order) — TODO confirm against
 * the native implementation.
 */
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
// Appends one empty row. Return value comes from nativeAddEmptyRow —
// presumably the index of the new row; TODO confirm native contract.
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
// Appends 'rows' empty rows in a single native call. 'rows' must be >= 1.
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
// Appends a row built from 'values' (one value per column, in column order)
// and returns the index of the appended row.
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Inserts a row at rowIndex (existing rows at and after that index are
 * pushed down). 'values' must hold exactly one entry per column, each
 * compatible with the column's type; subtable columns take nested
 * Object[][] row data or null.
 *
 * @param rowIndex position to insert at; must be &lt;= size()
 * @param values one value per column, in column order
 * @throws IllegalStateException if the table is immutable (read transaction)
 * @throws IllegalArgumentException on index, arity, or type mismatch
 */
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
// BUGFIX: 'value' may be null when the match fails; calling
// value.getClass() here threw NullPointerException and masked the
// intended IllegalArgumentException.
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " +
(value == null ? "null" : String.valueOf(value.getClass())) + ".");
}
}
// Insert values, dispatching on the column type recorded above.
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// Native layer stores seconds since epoch, not milliseconds.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
// NOTE(review): other types were presumably rejected by matchObject()
// above — confirm, otherwise this cell is silently skipped.
break;
case ColumnTypeTable:
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
// Native inserts must be committed with insertDone().
insertDone();
}
/**
 * Fills a just-inserted subtable cell from nested row data.
 * 'value' is expected to be an Object[] of rows, each itself an Object[]
 * of cell values; a null value leaves the subtable empty.
 */
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
/**
 * Replaces the row at rowIndex with the given values.
 * All values are validated up front; only then is the old row removed and
 * the new one inserted, so a validation failure leaves the table unchanged.
 *
 * @param rowIndex row to replace; must be &lt; size()
 * @param values one value per column, in column order
 * @throws IllegalStateException if the table is immutable (read transaction)
 * @throws IllegalArgumentException on index, arity, or type mismatch
 */
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
// BUGFIX: 'value' may be null when the match fails; calling
// value.getClass() here threw NullPointerException and masked the
// intended IllegalArgumentException.
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " +
(value == null ? "null" : String.valueOf(value.getClass())) + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
// Typed single-cell insert wrappers. Each checks the immutable flag and then
// delegates to the matching native insert; a sequence of inserts must be
// finished with insertDone().
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
// Dates are stored natively in seconds since epoch, hence the /1000.
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
// Binary insert from a ByteBuffer; only direct buffers are supported.
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
//System.err.printf("\ninsertBinary(col %d, row %d, ByteBuffer)\n", columnIndex, rowIndex);
//System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
// NOTE(review): this overload checks null before the immutable flag, the
// ByteBuffer overload does the reverse — confirm which order is intended.
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable) throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
// Inserts a subtable cell and recursively fills it from 'values'.
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Commits a sequence of per-cell inserts as a complete row.
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
// Typed single-cell getters; each delegates straight to the native layer.
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
// Native layer stores seconds since epoch; Date expects milliseconds.
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (string) cell.
 *
 * @param columnIndex
 *            0 based index value of the column
 * @param rowIndex
 *            0 based index of the row.
 * @return value of the particular cell
 */
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex
 *            0 based index value of the cell column
 * @param rowIndex
 *            0 based index value of the cell row
 * @return value of the particular cell.
 */
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
// Reports the runtime type currently held by a mixed-typed cell.
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let javas garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
// Returns the number of rows in the subtable cell without materializing it.
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
// Removes all rows from the subtable cell.
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
// Typed single-cell setters; each rejects writes during a read transaction
// and then delegates to the native layer.
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
// Dates are stored natively in seconds since epoch, hence the /1000.
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 *            the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 */
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data new cell content; must not be null
 */
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
 * Adds 'value' to every cell in the given (integer) column.
 *
 * @param columnIndex column whose cells are incremented
 * @param value amount added to each cell
 */
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
// Creates a search index on the column; only string columns are supported.
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
// True if the column has a search index.
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Column-wide aggregates, one family per numeric column type. Each variant
// delegates directly to the native implementation.
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
// Counts the cells in a column equal to 'value'; one overload per type.
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
// Starts a query builder over this table; the query inherits the table's
// immutability flag.
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
// findFirst* return the row index of the first match; the not-found value
// comes from the native layer (presumably -1 — TODO confirm).
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
// Dates are compared natively in seconds since epoch, hence the /1000.
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
// findAll* return a TableView of every matching row, sharing this table's
// immutability flag.
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
// Binary-search style bounds on a (presumably sorted) integer column —
// TODO confirm the sortedness precondition against the native docs.
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
// Returns a view containing one row per distinct value in the column.
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
// Asks the native layer to optimize internal storage (e.g. string columns).
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
// Serializes the whole table to a JSON string via the native layer.
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
// INFINITE (-1) means "no row limit" for the native dump.
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Common failure path for every mutator invoked during a read transaction.
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
Right
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
TODO: add an isEqual(Table) method for explicit content comparison.
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of the employees of a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employees, i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableSchema {
// Sentinel passed to nativeToString meaning "no row limit".
public static final long INFINITE = -1;
// Handle to the native (C++) table object; 0 once closed.
protected long nativePtr;
// True when opened inside a read transaction; mutators then throw.
protected boolean immutable = false;
// test:
// Debug bookkeeping: per-instance id and live-instance counter.
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
// Load the TightDB JNI library before any native method is touched.
TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 *
 * @throws OutOfMemoryError if the native table cannot be allocated
 */
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
// Allocates a fresh native table and returns its handle (0 on failure).
protected native long createNative();
/**
 * Wraps an existing native table handle (e.g. a subtable or a table
 * obtained from a transaction). 'parent' is accepted to keep the owner
 * reachable by the caller; the immutability flag is inherited.
 */
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
/**
 * Finalizer safety net: releases the native table if the user never called
 * close(). close() is idempotent, so an explicit close followed by this
 * finalizer is harmless.
 */
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
/**
 * Releases the native table object backing this instance.
 * Idempotent: once nativePtr has been zeroed, further calls return without
 * touching native memory, so finalize() and explicit close() cannot
 * double-free. All closes are serialized through the global CloseMutex.
 */
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
// Zero the handle so a second close()/finalize() is a no-op.
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
 * Check whether this Table is still valid.
 * Whenever a Table/subtable is changed/updated, all of its subtables are
 * invalidated: no further operations are allowed on an invalidated table
 * (they throw), except for this method.
 */
public boolean isValid(){
    // A closed table (nativePtr == 0) can never be valid; otherwise defer
    // to the native layer's validity flag.
    return nativePtr != 0 && nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
// Equality is decided by the native layer (content comparison), not by pointer identity.
return nativeEquals(nativePtr, otherTable.nativePtr);
}
// NOTE(review): equals() is overridden but no matching hashCode() override is
// visible in this block; if none exists elsewhere in the class, equal tables can
// hash to different buckets, violating the Object contract — confirm and fix.
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Validates a column name before it is handed to the native layer.
 *
 * @param name the proposed column name
 * @throws IllegalArgumentException if the name is null or longer than 63 characters
 */
private void verifyColumnName(String name) {
    // Fail with a descriptive message instead of a bare NullPointerException
    // from name.length() when callers pass null.
    if (name == null) {
        throw new IllegalArgumentException("Column name must not be null.");
    }
    if (name.length() > 63) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
/**
 * Returns a schema handle addressing the subtable column at {@code columnIndex}.
 * May only be called on a root table; subtables must be reached through their root.
 *
 * @throws UnsupportedOperationException if this table is itself a subtable
 */
public TableSchema getSubTableSchema(long columnIndex) {
    if (!nativeIsRootTable(nativePtr)) {
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    }
    long[] path = { columnIndex };
    return new SubTableSchema(nativePtr, path);
}

// True when this table is a top-level (root) table in the native layer.
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
 * Remove a column from the table dynamically.
 *
 * @param columnIndex index of the column to remove
 * @throws IllegalStateException if called during a read transaction
 */
public void removeColumn(long columnIndex) {
    // Consistency fix: every other mutating method (clear, remove, updateFromSpec, ...)
    // guards against mutation inside a read transaction; this one was missing the guard.
    if (immutable) throwImmutable();
    nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table.
 *
 * @param columnIndex index of the column to rename
 * @param newName the new name (max 63 characters, must not be null)
 * @throws IllegalStateException if called during a read transaction
 * @throws IllegalArgumentException if the name exceeds the length limit
 */
public void renameColumn(long columnIndex, String newName) {
    // Consistency fix: mutating methods must reject calls while the table is immutable.
    if (immutable) throwImmutable();
    verifyColumnName(newName);
    nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates this table's column structure from a table specification.
 * Supported column types - refer to @see ColumnType.
 *
 * @param tableSpec
 *            the specification describing the column names and types this
 *            table should have. Duplicate column names are not allowed.
 * @throws IllegalStateException if called during a read transaction
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
public boolean isEmpty() {
return size() == 0;
}
/**
 * Clears the table i.e., deleting all rows in the table.
 *
 * @throws IllegalStateException if called during a read transaction
 */
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
* Returns the number of columns in the table.
*
* @return the number of columns.
*/
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Looks up a column by its name using a linear scan over the columns.
 *
 * @param name the column name to search for
 * @return the 0-based index of the first column with that name, or -1 if none matches
 */
public long getColumnIndex(String name) {
    final long count = getColumnCount();
    long index = 0;
    while (index < count) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
* Get the type of a column identified by the columnIdex.
*
* @param columnIndex index of the column.
* @return Type of the particular column.
*/
public ColumnType getColumnType(long columnIndex)
{
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
* Removes a row from the specific index. As of now the entry is simply
* removed from the table.
*
* @param rowIndex the row index (starting with 0)
*
*/
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
* EXPERIMENTAL function
*/
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Appends a new row holding the given values (one value per column, in column order).
 *
 * @return the index of the newly appended row
 */
public long add(Object... values) {
    final long newRowIndex = size();
    insert(newRowIndex, values);
    return newRowIndex;
}
/**
 * Inserts a row of values at {@code rowIndex}. All values for the row must be
 * supplied, one per column, in column order; values are type-checked against
 * the column types before anything is written, so a failed check leaves the
 * table unmodified. The native insert protocol requires one insert call per
 * column followed by insertDone().
 *
 * @param rowIndex the position to insert at; must be <= size()
 * @param values one value per column, each compatible with its column's type
 * @throws IllegalStateException if called during a read transaction
 * @throws IllegalArgumentException on index/arity/type mismatch
 */
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Validate every value up front (and cache the column types) so that no
// partial row is written when a later value turns out to be incompatible.
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// Dates are stored natively with second (not millisecond) resolution.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
// NOTE(review): if value is neither byte[] nor ByteBuffer nothing is
// inserted for this column, leaving the row incomplete. The matchObject()
// pre-check above presumably rules that out — confirm.
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
break;
case ColumnTypeTable:
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
// Commit the pending row in the native layer.
insertDone();
}
// Recursively fills the subtable cell at (columnIndex, rowIndex) from a nested
// Object[][] value (one inner array per subtable row). A null value leaves the
// freshly inserted subtable empty.
// NOTE(review): the subtable handle obtained here is not explicitly closed;
// cleanup relies on finalize() — confirm this is intended.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
/**
 * Replaces the row at {@code rowIndex} with the given values. All values are
 * validated before anything is modified. Implemented as remove-then-insert,
 * so subtables in the row are rebuilt rather than updated in place.
 *
 * @param rowIndex the row to overwrite; must be < size()
 * @param values one value per column, each compatible with its column's type
 * @throws IllegalStateException if called during a read transaction
 * @throws IllegalArgumentException on index/arity/type mismatch
 */
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
/**
 * Inserts a binary value from a ByteBuffer during row construction.
 *
 * @param data must be allocated with ByteBuffer.allocateDirect(); heap buffers
 *             are currently rejected
 * @throws IllegalStateException if called during a read transaction
 * @throws NullPointerException if data is null
 */
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    if (data.isDirect())
        nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
/**
 * Inserts a binary value from a byte array during row construction.
 *
 * @throws IllegalStateException if called during a read transaction
 * @throws NullPointerException if data is null
 */
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
    // Consistency fix: the immutable guard now runs first, matching the
    // ByteBuffer overload and every other mutating method.
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("Null Array");
    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Commits a row assembled via the per-column insertXxx() calls. Must be called
// once after all columns of the new row have been inserted.
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string )cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
public ColumnType getMixedType(long columnIndex, long rowIndex)
{
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let Java's garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Add the value for to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
/**
 * Index-backed lookup of {@code value} in column 0. Requires that column 0 is
 * a string column with an index set (see setIndex()).
 *
 * @throws RuntimeException if column 0 is not an indexed string column
 */
public long lookup(String value) {
    boolean indexedStringColumn =
            hasIndex(0) && getColumnType(0) == ColumnType.ColumnTypeString;
    if (!indexedStringColumn) {
        throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
    }
    return nativeLookup(nativePtr, value);
}
// Native lookup using the column index; the meaning of the returned long is
// defined by the native layer (presumably the matching row index — TODO confirm).
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Shared failure path for every mutating method invoked while this table is
// marked immutable (i.e. obtained inside a read transaction).
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
/**
* A dynamically typed table of columns and rows, implemented as a thin Java
* wrapper around a native (JNI) core table. Nearly every public method
* delegates to a corresponding protected native function that receives the
* raw native pointer ({@code nativePtr}).
*
* Instances obtained inside a read transaction are flagged {@code immutable};
* every mutating method then fails via {@link #throwImmutable()}.
* close() is serialized across all instances through a global CloseMutex.
*/
public class Table implements TableOrView, TableSchema {
// Passed as maxRows to nativeToString() to mean "print all rows".
public static final long INFINITE = -1;
// Address of the underlying native table; 0 once close() has run.
protected long nativePtr;
// True when the table belongs to a read transaction; mutators then throw.
protected boolean immutable = false;
// Debug-only bookkeeping: per-instance sequence number and live-object count.
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
TightDB.loadLibrary();
}
/**
* Construct a Table base object. It can be used to register columns in this
* table. Registering into table is allowed only for empty tables. It
* creates a native reference of the object and keeps a reference to it.
*/
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
// Wraps an already-created native table (e.g. a subtable handle).
// NOTE(review): 'parent' is not stored or used here — presumably present so
// callers document ownership; confirm against the call sites.
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
// NOTE(review): finalize() is deprecated in modern Java; an explicit
// close()/Cleaner would be preferable, but GC-driven cleanup is relied on here.
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
// Releases the native table. Idempotent: a second call (nativePtr == 0) is
// ignored. Guarded by the global CloseMutex shared with other native wrappers.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
* Check if the Table is valid.
* Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
* You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
* The only method you can call is 'isValid()'.
*/
public boolean isValid(){
if (nativePtr == 0)
return false;
return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
// Equality is delegated to the native layer so it works for typed subclasses too.
// NOTE(review): equals() is overridden without hashCode(); instances are unsafe
// as keys in hash-based collections.
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
// The core currently limits column names to 63 characters.
private void verifyColumnName(String name) {
if (name.length() > 63) {
throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
}
}
/**
* Returns a schema handle for the subtable column at {@code columnIndex}.
* Only valid on a root table, not on a subtable.
*/
public TableSchema getSubTableSchema(long columnIndex) {
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
* Remove a column in the table dynamically.
*/
public void removeColumn(long columnIndex)
{
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
* Rename a column in the table.
*/
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
* Updates the table layout from a {@link TableSpec} structure.
* Supported column types - see {@link ColumnType}.
*
* @param tableSpec
* specification describing the columns of this table.
*/
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
* Get the number of entries/rows of this table.
*
* @return The number of rows.
*/
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
* Checks whether this table is empty or not.
*
* @return true if empty, otherwise false.
*/
public boolean isEmpty() {
return size() == 0;
}
/**
* Clears the table i.e., deleting all rows in the table.
*/
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
* Returns the number of columns in the table.
*
* @return the number of columns.
*/
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
* Returns the 0-based index of a column based on the name.
* Linear scan over column names: O(columnCount) per lookup.
*
* @param name column name
* @return the index, -1 if not found
*/
public long getColumnIndex(String name) {
long columnCount = getColumnCount();
for (long i = 0; i < columnCount; i++) {
if (name.equals(getColumnName(i))) {
return i;
}
}
return -1;
}
/**
* Get the type of a column identified by the columnIndex.
*
* @param columnIndex index of the column.
* @return Type of the particular column.
*/
public ColumnType getColumnType(long columnIndex)
{
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
* Removes a row from the specific index. As of now the entry is simply
* removed from the table.
*
* @param rowIndex the row index (starting with 0)
*
*/
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
// Removes the last row of the table.
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
* EXPERIMENTAL function
*/
// NOTE(review): by its name this overwrites the row at rowIndex with the last
// row instead of shifting rows — confirm against the native implementation.
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
// Appends one empty row; the return value is whatever the native call yields
// (presumably the new row's index — confirm against the JNI layer).
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
// Appends a row holding the given values and returns the new row's index.
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
* Inserts a row at {@code rowIndex}, validating count and type of
* {@code values} against the table's columns first, then writing each cell
* through the per-type native insert calls, and finally committing the row
* with {@link #insertDone()}.
*
* NOTE(review): if a mismatching value is null, {@code value.getClass()} in
* the error path below throws NullPointerException instead of the intended
* IllegalArgumentException.
*/
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// Dates are passed to the native layer as seconds since the epoch.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
// Accepts either a byte[] or a ByteBuffer for binary cells.
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
break;
case ColumnTypeTable:
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
insertDone();
}
// Recursively inserts the rows of a nested Object[][] into the subtable cell
// at (columnIndex, rowIndex). A null value leaves the subtable empty.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
/**
* Replaces the row at {@code rowIndex} with the given values, after the same
* count/type validation as {@link #insert}. Implemented as remove + insert
* (see TODO below), so the row's subtables are rebuilt, not updated in place.
*/
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
// Per-type row-insert helpers: each checks immutability then delegates to the
// matching JNI call. Rows built this way must be finished with insertDone().
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
// Date is converted to seconds since the epoch before crossing into JNI.
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
// Only direct (allocateDirect) ByteBuffers are currently supported.
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
//System.err.printf("\ninsertBinary(col %d, row %d, ByteBuffer)\n", columnIndex, rowIndex);
//System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable) throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
// Inserts a subtable cell and fills it from a nested Object[][] of row values.
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Commits a row built with the per-cell insert helpers above.
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
// Converts the natively stored epoch-seconds value back to a java.util.Date.
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string) cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
// Returns the runtime type of the value stored in a Mixed cell.
public ColumnType getMixedType(long columnIndex, long rowIndex)
{
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
*
* Note: The subtable returned will have to be closed again after use.
* You can let javas garbage collector handle that or better yet call close()
* after use.
*
* @param columnIndex column index of the cell
* @param rowIndex row index of the cell
* @return TableBase the subtable at the requested cell
*/
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
// Date is converted to seconds since the epoch before crossing into JNI.
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Adds the value to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
// Creates a search index on the column; only string columns are supported.
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
// Returns a query builder over this table; it inherits the immutable flag.
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
// findAll* return a TableView of every matching row; the view inherits the
// immutable flag of this table.
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
// Returns a view containing one row per distinct value in the column.
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Uniform failure for any mutating call made during a read transaction.
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
MergeMethods
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
public class Table implements TableOrView, TableSchema {
public static final long INFINITE = -1;
protected long nativePtr;
protected boolean immutable = false;
// test:
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
TightDB.loadLibrary();
}
/**
* Construct a Table base object. It can be used to register columns in this
* table. Registering into table is allowed only for empty tables. It
* creates a native reference of the object and keeps a reference to it.
*/
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
@Override
public void finalize() throws Throwable {
if (DEBUG)
System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
* Check if the Table is valid.
* Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
* You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
* The only method you can call is 'isValid()'.
*/
public boolean isValid() {
if (nativePtr == 0)
return false;
return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
// Rejects column names longer than the current native limit of 63 characters.
private void verifyColumnName(String name) {
    final int MAX_COLUMN_NAME_LENGTH = 63;
    if (name.length() <= MAX_COLUMN_NAME_LENGTH) {
        return;
    }
    throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
}
/**
 * Returns a schema handle for the subtable column at the given index.
 * Only permitted on a root table; subtable schemas must be reached
 * through their root table.
 */
public TableSchema getSubTableSchema(long columnIndex) {
    if (!nativeIsRootTable(nativePtr))
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    long[] path = { columnIndex };
    return new SubTableSchema(nativePtr, path);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
@Override
public long addColumn(ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
* Remove a column in the table dynamically.
*/
@Override
public void removeColumn(long columnIndex) {
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
* Rename a column in the table.
*/
@Override
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
* Updates a table specification from a Table specification structure.
* Supported types - refer to @see ColumnType.
*
* @param columnType
* data type of the column @see <code>ColumnType</code>
* @param columnName
* name of the column. Duplicate column name is not allowed.
*/
public void updateFromSpec(TableSpec tableSpec) {
if (immutable)
throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
* Get the number of entries/rows of this table.
*
* @return The number of rows.
*/
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Reports whether this table contains any rows.
 *
 * @return true when the row count is zero, otherwise false.
 */
@Override
public boolean isEmpty() {
    return 0 == size();
}
/**
* Clears the table i.e., deleting all rows in the table.
*/
@Override
public void clear() {
if (immutable)
throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
* Returns the number of columns in the table.
*
* @return the number of columns.
*/
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
public TableSpec getTableSpec() {
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of a column based on the name.
 *
 * @param name column name; must not be null
 * @return the index, -1 if not found
 * @throws IllegalArgumentException if {@code name} is null
 */
@Override
public long getColumnIndex(String name) {
    // Bug fix: a null name previously threw a bare NullPointerException from
    // name.equals(...) inside the loop; fail fast with a clear message instead.
    if (name == null)
        throw new IllegalArgumentException("Column name must not be null.");
    long columnCount = getColumnCount();
    // Linear scan: the native layer only exposes column names by index.
    for (long i = 0; i < columnCount; i++) {
        if (name.equals(getColumnName(i))) {
            return i;
        }
    }
    return -1;
}
/**
* Get the type of a column identified by the columnIdex.
*
* @param columnIndex index of the column.
* @return Type of the particular column.
*/
@Override
public ColumnType getColumnType(long columnIndex) {
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
* Removes a row from the specific index. As of now the entry is simply
* removed from the table.
*
* @param rowIndex the row index (starting with 0)
*
*/
@Override
public void remove(long rowIndex) {
if (immutable)
throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
@Override
public void removeLast() {
if (immutable)
throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
* EXPERIMENTAL function
*/
public void moveLastOver(long rowIndex) {
if (immutable)
throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
public long addEmptyRow() {
if (immutable)
throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
/**
 * Appends the requested number of empty rows to the table.
 *
 * @param rows number of rows to append; must be positive
 * @return the value reported by the native add operation
 *         (presumably the index of the first new row -- TODO confirm)
 * @throws IllegalArgumentException if {@code rows} is less than 1
 */
public long addEmptyRows(long rows) {
    if (immutable) {
        throwImmutable();
    }
    if (rows < 1) {
        throw new IllegalArgumentException("'rows' must be > 0.");
    }
    return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Inserts a new row at 'rowIndex', taking exactly one value per column.
 * All arguments are validated up front (index, arity, and per-column type
 * compatibility) so that a bad call fails before any native mutation occurs.
 *
 * @param rowIndex position of the new row; must be <= size()
 * @param values   one value per column, in column order
 * @throws IllegalArgumentException if the index, the number of values,
 *         or any value's type does not match the table
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void insert(long rowIndex, Object... values) {
    if (immutable)
        throwImmutable();
    // Check index
    long size = size();
    if (rowIndex > size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be <= table.size() " + String.valueOf(size) + ".");
    }
    // Check values types
    int columns = (int) getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
    }
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // Bug fix: 'value' may be null here; the old message built
            // "value.getClass()" unconditionally, throwing an NPE instead of
            // the intended IllegalArgumentException.
            String actual = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + actual + ".");
        }
    }
    // Insert values -- types were fully verified above, so the casts are safe.
    for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[(int) columnIndex];
        switch(colTypes[(int) columnIndex]) {
        case ColumnTypeBool:
            nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean) value);
            break;
        case ColumnTypeInt:
            nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number) value).longValue());
            break;
        case ColumnTypeFloat:
            nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float) value).floatValue());
            break;
        case ColumnTypeDouble:
            nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double) value).doubleValue());
            break;
        case ColumnTypeString:
            nativeInsertString(nativePtr, columnIndex, rowIndex, (String) value);
            break;
        case ColumnTypeDate:
            // The native layer stores seconds since the epoch, not milliseconds.
            nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date) value).getTime() / 1000);
            break;
        case ColumnTypeMixed:
            nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
            break;
        case ColumnTypeBinary:
            // matchObject() accepted the value, so it is byte[] or ByteBuffer.
            if (value instanceof byte[])
                nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[]) value);
            else if (value instanceof ByteBuffer)
                nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer) value);
            break;
        case ColumnTypeTable:
            nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
            insertSubtableValues(rowIndex, columnIndex, value);
            break;
        default:
            throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int) columnIndex]));
        }
    }
    insertDone();
}
// Recursively fills a freshly inserted subtable cell with row data.
// 'value' is expected to be an Object[] of rows, each row itself an
// Object[] of column values; null means "leave the subtable empty".
// NOTE(review): the subtable handle obtained here is never close()d
// explicitly; cleanup relies on finalize() -- confirm this is intended.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[]) value).length;
for (int i = 0; i < rows; ++i) {
Object rowArr = ((Object[]) value)[i];
subtable.insert(i, (Object[]) rowArr);
}
}
}
/**
 * Replaces the row at 'rowIndex' with the given values.
 * All values are validated first so a type error leaves the row intact;
 * the row is then removed and re-inserted (see TODO below).
 *
 * @param rowIndex index of the row to overwrite; must be < size()
 * @param values   one value per column, in column order
 * @throws IllegalArgumentException if the index, the number of values,
 *         or any value's type does not match the table
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void set(long rowIndex, Object... values) {
    if (immutable)
        throwImmutable();
    // Check index
    long size = size();
    if (rowIndex >= size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be < table.size() " + String.valueOf(size) + ".");
    }
    // Verify number of 'values'
    int columns = (int) getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
    }
    // Verify type of 'values'. (The previous version also filled a colTypes[]
    // array that was never read afterwards; it has been removed.)
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        if (!colType.matchObject(value)) {
            // Bug fix: avoid NPE when 'value' is null while building the
            // type-mismatch message (old code called value.getClass()).
            String actual = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + actual + ".");
        }
    }
    // Now that all values are verified, we can remove the row and insert it again.
    // TODO: Can be optimized to only set the values (but clear any subtables)
    remove(rowIndex);
    insert(rowIndex, values);
}
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable)
throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable)
throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable)
throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable)
throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable)
throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable)
throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable)
throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
/**
 * Inserts a binary value from a direct ByteBuffer.
 * Consistency fix: argument validation now runs in the same order as
 * insertBinary(long, long, byte[]) -- the null check precedes the
 * immutability check.
 *
 * @param data the ByteBuffer; must be allocated with ByteBuffer.allocateDirect()
 * @throws NullPointerException if {@code data} is null
 */
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    if (immutable)
        throwImmutable();
    //System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
    if (data.isDirect())
        nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        // FIXME: support other than allocateDirect
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable)
throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable)
throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
public void insertDone() {
if (immutable)
throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex) * 1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string )cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
*
* Note: The subtable returned will have to be closed again after use.
* You can let javas garbage collector handle that or better yet call close()
* after use.
*
* @param columnIndex column index of the cell
* @param rowIndex row index of the cell
* @return TableBase the subtable at the requested cell
*/
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable)
throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable)
throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable)
throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable)
throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable)
throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable)
throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable)
throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
// FIXME: support other than allocateDirect
throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Add the value for to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable)
throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
public void setIndex(long columnIndex) {
if (immutable)
throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
/**
 * Indexed lookup of 'value' in column 0, which must be an indexed
 * String column.
 */
@Override
public long lookup(String value) {
    // Equivalent to the original !hasIndex || type != String guard (De Morgan);
    // short-circuit evaluation order is preserved.
    boolean indexedStringColumn = this.hasIndex(0) && this.getColumnType(0) == ColumnType.ColumnTypeString;
    if (!indexedStringColumn) {
        throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
    }
    return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable)
throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
public class Table implements TableOrView, TableSchema {
public static final long INFINITE = -1;
protected long nativePtr;
protected boolean immutable = false;
// test:
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
TightDB.loadLibrary();
}
/**
* Construct a Table base object. It can be used to register columns in this
* table. Registering into table is allowed only for empty tables. It
* creates a native reference of the object and keeps a reference to it.
*/
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
@Override
public void finalize() throws Throwable {
if (DEBUG)
System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
// Releases the underlying native table object. Idempotent: once nativePtr
// has been reset to 0 a repeated call is silently ignored. All native close
// operations are serialized through the process-wide CloseMutex.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
// Mark closed: isValid() now returns false and double-close is a no-op.
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
 * Check if the Table is valid.
 * Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
 * You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
 * The only method you can call is 'isValid()'.
 */
public boolean isValid() {
    // A closed table (nativePtr == 0) is never valid; otherwise the native
    // layer decides. Short-circuit keeps evaluation order identical.
    return nativePtr != 0 && nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
private void verifyColumnName(String name) {
if (name.length() > 63) {
throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
}
}
/**
 * Returns a schema handle for the subtable column at {@code columnIndex}.
 * Only valid on a root table; subtables cannot navigate schemas.
 */
public TableSchema getSubTableSchema(long columnIndex) {
    if (!nativeIsRootTable(nativePtr)) {
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    }
    long[] path = { columnIndex };
    return new SubTableSchema(nativePtr, path);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
 * Add a column to the table dynamically.
 * @return Index of the new column.
 */
@Override
public long addColumn(ColumnType type, String name) {
    verifyColumnName(name);
    return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
 * Remove a column in the table dynamically.
 * NOTE(review): unlike clear()/remove(), this does not check 'immutable'
 * before mutating — confirm whether schema changes are permitted during
 * a read transaction.
 */
@Override
public void removeColumn(long columnIndex) {
    nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table. The new name is validated the same way
 * as in addColumn().
 */
@Override
public void renameColumn(long columnIndex, String newName) {
    verifyColumnName(newName);
    nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates a table specification from a Table specification structure.
 * Supported types - refer to @see ColumnType.
 *
 * @param tableSpec the specification describing this table's columns
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void updateFromSpec(TableSpec tableSpec) {
    if (immutable)
        throwImmutable();
    nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
@Override
public long size() {
    return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
@Override
public boolean isEmpty() {
    return size() == 0;
}
/**
 * Clears the table i.e., deleting all rows in the table.
 */
@Override
public void clear() {
    if (immutable)
        throwImmutable();
    nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
@Override
public long getColumnCount() {
    return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
// Returns the full column specification of this table.
public TableSpec getTableSpec() {
    return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
@Override
public String getColumnName(long columnIndex) {
    return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of a column based on the name.
 * Linear scan over the column names.
 *
 * @param name column name
 * @return the index, -1 if not found
 */
@Override
public long getColumnIndex(String name) {
    final long count = getColumnCount();
    long index = 0;
    while (index < count) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
 * Get the type of a column identified by the columnIdex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
@Override
public ColumnType getColumnType(long columnIndex) {
    return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 *
 */
@Override
public void remove(long rowIndex) {
    if (immutable)
        throwImmutable();
    nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
// Removes the last row of the table.
@Override
public void removeLast() {
    if (immutable)
        throwImmutable();
    nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
 * EXPERIMENTAL function
 * Presumably overwrites the row at rowIndex with the last row — TODO
 * confirm against the native implementation before relying on it.
 */
public void moveLastOver(long rowIndex) {
    if (immutable)
        throwImmutable();
    nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
// Appends a single empty row; returns whatever the native layer reports
// (presumably the new row's index — confirm against nativeAddEmptyRow).
public long addEmptyRow() {
    if (immutable)
        throwImmutable();
    return nativeAddEmptyRow(nativePtr, 1);
}
// Appends 'rows' empty rows; 'rows' must be positive.
public long addEmptyRows(long rows) {
    if (immutable)
        throwImmutable();
    if (rows < 1)
        throw new IllegalArgumentException("'rows' must be > 0.");
    return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
// Appends a row with the given values and returns the index it was added at.
public long add(Object... values) {
    long rowIndex = size();
    insert(rowIndex, values);
    return rowIndex;
}
/**
 * Inserts a new row at {@code rowIndex}, supplying one value per column.
 * All values are validated up front so a bad value cannot leave a
 * half-inserted row behind.
 *
 * @param rowIndex position of the new row; must be {@code <= size()}
 * @param values   one value per column, each compatible with the column's type
 * @throws IllegalStateException    if the table is immutable (read transaction)
 * @throws IllegalArgumentException if the index, value count, or a value type is invalid
 */
public void insert(long rowIndex, Object... values) {
    if (immutable)
        throwImmutable();
    // Check index
    long size = size();
    if (rowIndex > size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be <= table.size() " + String.valueOf(size) + ".");
    }
    // Check values types before touching native state.
    ColumnType colTypes[] = validateRowValues(values);
    // Insert values
    for (long columnIndex = 0; columnIndex < colTypes.length; columnIndex++) {
        insertCellValue(columnIndex, rowIndex, colTypes[(int) columnIndex], values[(int) columnIndex]);
    }
    insertDone();
}

// Checks that 'values' has one entry per column and that each entry matches
// its column's type; returns the per-column types for reuse by the caller.
private ColumnType[] validateRowValues(Object... values) {
    int columns = (int) getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
    }
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // BUGFIX: guard null before value.getClass() — previously a null
            // value threw NullPointerException here, masking the intended
            // IllegalArgumentException.
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + (value == null ? "null" : value.getClass()) + ".");
        }
    }
    return colTypes;
}

// Writes a single already-validated cell value via the matching native call.
private void insertCellValue(long columnIndex, long rowIndex, ColumnType colType, Object value) {
    switch(colType) {
        case ColumnTypeBool:
            nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean) value);
            break;
        case ColumnTypeInt:
            nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number) value).longValue());
            break;
        case ColumnTypeFloat:
            nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float) value).floatValue());
            break;
        case ColumnTypeDouble:
            nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double) value).doubleValue());
            break;
        case ColumnTypeString:
            nativeInsertString(nativePtr, columnIndex, rowIndex, (String) value);
            break;
        case ColumnTypeDate:
            // The native layer stores dates as epoch seconds.
            nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date) value).getTime() / 1000);
            break;
        case ColumnTypeMixed:
            nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
            break;
        case ColumnTypeBinary:
            // matchObject() presumably only accepts byte[] or ByteBuffer here;
            // any other value is silently skipped (original behavior preserved).
            if (value instanceof byte[])
                nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[]) value);
            else if (value instanceof ByteBuffer)
                nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer) value);
            break;
        case ColumnTypeTable:
            nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
            insertSubtableValues(rowIndex, columnIndex, value);
            break;
        default:
            throw new RuntimeException("Unexpected columnType: " + String.valueOf(colType));
    }
}
// Recursively populates the subtable cell at (rowIndex, columnIndex) from a
// nested Object[][] of row values; a null value means "leave empty".
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
    if (value == null) {
        return;
    }
    Object[] rows = (Object[]) value;
    Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
    for (int i = 0; i < rows.length; ++i) {
        subtable.insert(i, (Object[]) rows[i]);
    }
}
/**
 * Replaces the row at {@code rowIndex} with the given values.
 * Implemented as remove + insert (see TODO below), so the operation is not
 * atomic: if the insert were to fail after validation, the old row is gone.
 *
 * @param rowIndex index of the row to overwrite; must be {@code < size()}
 * @param values   one value per column, each compatible with the column's type
 * @throws IllegalStateException    if the table is immutable (read transaction)
 * @throws IllegalArgumentException if the index, value count, or a value type is invalid
 */
public void set(long rowIndex, Object... values) {
    if (immutable)
        throwImmutable();
    // Check index
    long size = size();
    if (rowIndex >= size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be < table.size() " + String.valueOf(size) + ".");
    }
    // Verify number of 'values'
    int columns = (int) getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
    }
    // Verify type of 'values'
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // BUGFIX: report null values instead of throwing an accidental
            // NullPointerException from value.getClass().
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + (value == null ? "null" : value.getClass()) + ".");
        }
    }
    // Now that all values are verified, we can remove the row and insert it again.
    // TODO: Can be optimized to only set the values (but clear any subtables)
    remove(rowIndex);
    insert(rowIndex, values);
}
// Low-level per-cell insert methods. Rows built with these are not visible
// until insertDone() is called. All throw via throwImmutable() during a
// read transaction.
// NOTE(review): the native declarations below are interleaved out of order
// with their wrappers (e.g. nativeInsertFloat precedes insertLong).
public void insertLong(long columnIndex, long rowIndex, long value) {
    if (immutable)
        throwImmutable();
    nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
    if (immutable)
        throwImmutable();
    nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
    if (immutable)
        throwImmutable();
    nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
    if (immutable)
        throwImmutable();
    nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
// Dates are stored natively with one-second resolution (epoch seconds).
public void insertDate(long columnIndex, long rowIndex, Date date) {
    if (immutable)
        throwImmutable();
    nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
    if (immutable)
        throwImmutable();
    nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
    if (immutable)
        throwImmutable();
    nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
// Only direct ByteBuffers are accepted (see FIXME below).
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    //System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
    if (data.isDirect())
        nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        // FIXME: support other than allocateDirect
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
// NOTE(review): this overload checks 'data' for null BEFORE the immutable
// check, unlike the ByteBuffer overload above — confirm which order is intended.
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
    if (data == null)
        throw new NullPointerException("Null Array");
    if (immutable)
        throwImmutable();
    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
// Inserts a subtable cell and recursively fills it from 'values'.
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
    if (immutable)
        throwImmutable();
    nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
    insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Commits the row assembled by the insert* calls above.
public void insertDone() {
    if (immutable)
        throwImmutable();
    nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
// Thin typed wrappers around the native cell readers; all take a 0-based
// (columnIndex, rowIndex) pair.
@Override
public long getLong(long columnIndex, long rowIndex) {
    return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
    return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
    return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
    return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
// Converts the natively stored epoch-seconds back into a java.util.Date.
@Override
public Date getDate(long columnIndex, long rowIndex) {
    return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex) * 1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (string )cell.
 *
 * @param columnIndex
 * 0 based index value of the column
 * @param rowIndex
 * 0 based index of the row.
 * @return value of the particular cell
 */
@Override
public String getString(long columnIndex, long rowIndex) {
    return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex
 * 0 based index value of the cell column
 * @param rowIndex
 * 0 based index value of the cell row
 * @return value of the particular cell.
 */
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
    return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
    return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
    return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
// Returns the runtime type stored in a mixed cell.
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
    return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let javas garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
    return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
    return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
// Number of rows in the subtable at the given cell.
public long getSubTableSize(long columnIndex, long rowIndex) {
    return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
// Removes all rows from the subtable at the given cell.
public void clearSubTable(long columnIndex, long rowIndex) {
    if (immutable)
        throwImmutable();
    nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
// Typed cell writers; all reject writes during a read transaction via
// throwImmutable().
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
    if (immutable)
        throwImmutable();
    nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
    if (immutable)
        throwImmutable();
    nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
    if (immutable)
        throwImmutable();
    nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
    if (immutable)
        throwImmutable();
    nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
// Dates are stored natively with one-second resolution (epoch seconds).
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
    if (immutable)
        throwImmutable();
    nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
    if (immutable)
        throwImmutable();
    nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 * column index of the cell
 * @param rowIndex
 * row index of the cell
 * @param data
 * the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 */
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException("Null array");
    if (data.isDirect())
        nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        // FIXME: support other than allocateDirect
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException("Null Array");
    nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 * column index of the cell
 * @param rowIndex
 * row index of the cell
 * @param data
 */
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException();
    nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
 * Add the value for to all cells in the column.
 *
 * @param columnIndex column index of the cell
 * @param value
 */
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
    if (immutable)
        throwImmutable();
    nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
// Creates a search index on a column; only String columns are supported.
public void setIndex(long columnIndex) {
    if (immutable)
        throwImmutable();
    if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
        throw new IllegalArgumentException("Index is only supported on string columns.");
    nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
// True if the column has a search index (see setIndex).
public boolean hasIndex(long columnIndex) {
    return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Column-wide aggregates delegated to the native layer.
// NOTE(review): several native declarations below misspell the parameter as
// 'columnnIndex' — harmless, but worth fixing at the next JNI regeneration.
// Integers
@Override
public long sum(long columnIndex) {
    return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
    return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
    return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
    return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
    return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
    return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
    return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
    return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
    return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
    return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
    return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
    return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
// Number of cells in the column equal to 'value', per value type.
public long count(long columnIndex, long value) {
    return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
    return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
    return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
    return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
// Starts a new query on this table; the query inherits this table's
// immutability flag.
public TableQuery where() {
    return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
// findFirst*: return the row index of the first match; findAll*: return a
// TableView of every matching row.
@Override
public long findFirstLong(long columnIndex, long value) {
    return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
    return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
    return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
    return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
// Date comparisons use the native one-second resolution (epoch seconds).
@Override
public long findFirstDate(long columnIndex, Date date) {
    return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
    return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
    return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
    return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
    return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
    return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
    return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
    return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
    if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
        throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
    return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
// Lower/upper bound positions for 'value' in an integer column — presumably
// requires the column to be sorted; confirm against the native docs.
@Override
public long lowerBoundLong(long columnIndex, long value) {
    return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
    return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
// Returns a view containing one row per distinct value in the column.
public TableView distinct(long columnIndex) {
    return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
// Asks the native layer to optimize storage (e.g. string enumeration).
public void optimize() {
    if (immutable)
        throwImmutable();
    nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
// Serializes the whole table to a JSON string.
@Override
public String toJson() {
    return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
// Human-readable dump; INFINITE (-1) means "all rows".
@Override
public String toString() {
    return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
    return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
    return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Single failure path for every mutating call made during a read transaction.
private void throwImmutable() {
    throw new IllegalStateException("Mutable method call during read transaction.");
}
}
KeepBothMethods
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
public class Table implements TableOrView, TableSchema {
// Sentinel row limit for toString(): -1 means "no limit".
public static final long INFINITE = -1;
// Handle to the native (C++) table; 0 once closed.
protected long nativePtr;
// True while inside a read transaction; mutators then throwImmutable().
protected boolean immutable = false;
// test:
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
    // Load the JNI library before any native method can be called.
    TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 */
public Table() {
    // Native methods work will be initialized here. Generated classes will
    // have nothing to do with the native functions. Generated Java Table
    // classes will work as a wrapper on top of table.
    nativePtr = createNative();
    if (nativePtr == 0)
        throw new OutOfMemoryError("Out of native memory.");
    if (DEBUG) {
        tableNo = ++TableCount;
        System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
    }
}
protected native long createNative();
// Wraps an existing native table pointer (e.g. a subtable handle).
// NOTE(review): 'parent' is unused in this body — presumably retained to
// keep the parent reachable while the child is alive; confirm.
protected Table(Object parent, long nativePtr, boolean immutable) {
    this.immutable = immutable;
    this.nativePtr = nativePtr;
    if (DEBUG) {
        tableNo = ++TableCount;
        System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
    }
}
// Ensures the native table is released if the user never called close().
@Override
public void finalize() throws Throwable {
    if (DEBUG)
        System.err.println("==== FINALIZE " + tableNo + "...");
    try {
        close();
    } finally {
        super.finalize();
    }
}
// Releases the native table exactly once; serialized on a global mutex so
// teardown never races between finalizer and user threads.
private void close() {
    synchronized (CloseMutex.getInstance()) {
        if (nativePtr == 0) {
            if (DEBUG)
                System.err.println(".... CLOSE ignored.");
            return;
        }
        if (DEBUG) {
            TableCount--;
            System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
        }
        nativeClose(nativePtr);
        // Mark the handle dead so isValid() and repeat close() see it.
        nativePtr = 0;
    }
}
protected native void nativeClose(long nativeTablePtr);
/*
 * Check if the Table is valid.
 * Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
 * You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
 * The only method you can call is 'isValid()'.
 */
public boolean isValid() {
    if (nativePtr == 0)
        return false;
    return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
// Content-based equality delegated to the native layer.
// NOTE(review): no matching hashCode() override is visible here — confirm.
@Override
public boolean equals(Object other) {
    if (this == other)
        return true;
    if (other == null)
        return false;
    // Has to work for all the typed tables as well
    if (!(other instanceof Table))
        return false;
    Table otherTable = (Table) other;
    return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Validates a column name before it is handed to the native layer.
 *
 * @param name the proposed column name; must be non-null and at most 63 characters
 * @throws NullPointerException if {@code name} is null
 * @throws IllegalArgumentException if {@code name} is longer than 63 characters
 */
private void verifyColumnName(String name) {
    if (name == null) {
        // BUGFIX: previously a null name raised an anonymous NPE from
        // name.length(); fail with an explicit message instead.
        throw new NullPointerException("Column name must not be null.");
    }
    if (name.length() > 63) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
/**
 * Returns a definition handle for the subtable column at {@code columnIndex}.
 * Only valid on a root table.
 *
 * @param columnIndex index of a subtable column
 * @return definition handle rooted at this table
 * @throws UnsupportedOperationException if called on a subtable
 */
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
    // Idiomatic negation instead of "== false".
    if (!nativeIsRootTable(nativePtr))
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    long[] newPath = { columnIndex }; // path from the root: just this column
    return new SubTableDefinition(nativePtr, newPath);
}

/**
 * Returns a schema handle for the subtable column at {@code columnIndex}.
 * Only valid on a root table.
 *
 * @param columnIndex index of a subtable column
 * @return schema handle rooted at this table
 * @throws UnsupportedOperationException if called on a subtable
 */
public TableSchema getSubTableSchema(long columnIndex) {
    if (!nativeIsRootTable(nativePtr))
        throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
    long[] newPath = { columnIndex };
    return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);

/**
 * Add a column to the table dynamically.
 * Schema changes are mutations, so they are guarded like every other mutator
 * (updateFromSpec, clear, remove, ...).
 *
 * @return Index of the new column.
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
@Override
public long addColumn(ColumnType type, String name) {
    if (immutable)
        throwImmutable();
    verifyColumnName(name);
    return nativeAddColumn(nativePtr, type.getValue(), name);
}

protected native long nativeAddColumn(long nativeTablePtr, int type, String name);

/**
 * Remove a column in the table dynamically.
 *
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
@Override
public void removeColumn(long columnIndex) {
    if (immutable)
        throwImmutable();
    nativeRemoveColumn(nativePtr, columnIndex);
}

protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);

/**
 * Rename a column in the table.
 *
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
@Override
public void renameColumn(long columnIndex, String newName) {
    if (immutable)
        throwImmutable();
    verifyColumnName(newName);
    nativeRenameColumn(nativePtr, columnIndex, newName);
}

protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates this table's columns from a Table specification structure.
 * Supported column types - refer to @see ColumnType.
 *
 * @param tableSpec
 *            specification describing the columns. Duplicate column names are
 *            not allowed.
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void updateFromSpec(TableSpec tableSpec) {
    if (immutable)
        throwImmutable();
    nativeUpdateFromSpec(nativePtr, tableSpec);
}

protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);

// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
@Override
public long size() {
    return nativeSize(nativePtr);
}

protected native long nativeSize(long nativeTablePtr);

/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
@Override
public boolean isEmpty() {
    return size() == 0;
}

/**
 * Clears the table i.e., deleting all rows in the table.
 *
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
@Override
public void clear() {
    if (immutable)
        throwImmutable();
    nativeClear(nativePtr);
}

protected native void nativeClear(long nativeTablePtr);

// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
@Override
public long getColumnCount() {
    return nativeGetColumnCount(nativePtr);
}

protected native long nativeGetColumnCount(long nativeTablePtr);

// Snapshot of the current column layout as a TableSpec structure.
public TableSpec getTableSpec() {
    return nativeGetTableSpec(nativePtr);
}

protected native TableSpec nativeGetTableSpec(long nativeTablePtr);

/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
@Override
public String getColumnName(long columnIndex) {
    return nativeGetColumnName(nativePtr, columnIndex);
}

protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of the column with the given name.
 * Performs a linear scan over the column names.
 *
 * @param name column name
 * @return the index, -1 if not found
 */
@Override
public long getColumnIndex(String name) {
    final long count = getColumnCount();
    long index = 0;
    while (index < count) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        index++;
    }
    return -1; // no column carries that name
}
/**
 * Get the type of a column identified by the columnIndex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
@Override
public ColumnType getColumnType(long columnIndex) {
    return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}

protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);

/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
@Override
public void remove(long rowIndex) {
    if (immutable)
        throwImmutable();
    nativeRemove(nativePtr, rowIndex);
}

protected native void nativeRemove(long nativeTablePtr, long rowIndex);

// Removes the last row of the table.
@Override
public void removeLast() {
    if (immutable)
        throwImmutable();
    nativeRemoveLast(nativePtr);
}

protected native void nativeRemoveLast(long nativeTablePtr);

/**
 * EXPERIMENTAL function
 * NOTE(review): presumably moves the last row into the slot at rowIndex
 * (an order-destroying fast delete) — confirm against the native core.
 */
public void moveLastOver(long rowIndex) {
    if (immutable)
        throwImmutable();
    nativeMoveLastOver(nativePtr, rowIndex);
}

protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.

/** Appends one empty row and returns its index. */
public long addEmptyRow() {
    // Delegate to the general form; rows == 1 trivially passes its validation.
    return addEmptyRows(1);
}

/**
 * Appends {@code rows} empty rows.
 *
 * @param rows number of rows to append; must be positive
 * @return index of the first appended row
 */
public long addEmptyRows(long rows) {
    if (immutable)
        throwImmutable();
    if (rows < 1)
        throw new IllegalArgumentException("'rows' must be > 0.");
    return nativeAddEmptyRow(nativePtr, rows);
}

protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);

/** Appends a full row built from {@code values} and returns its index. */
public long add(Object... values) {
    long newRowIndex = size(); // append position
    insert(newRowIndex, values);
    return newRowIndex;
}
/**
 * Inserts a full row at {@code rowIndex}, shifting subsequent rows down.
 * {@code values} must match the table's column count, and each value must be
 * compatible with its column's type. Subtable columns accept a nested
 * {@code Object[][]} (or null for an empty subtable).
 *
 * @param rowIndex insertion position; must be &lt;= size()
 * @param values   one value per column, in column order
 * @throws IllegalStateException    if the table is immutable (read transaction)
 * @throws IllegalArgumentException on index/arity/type mismatch
 */
public void insert(long rowIndex, Object... values) {
    if (immutable)
        throwImmutable();
    // Check index
    long size = size();
    if (rowIndex > size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be <= table.size() " + String.valueOf(size) + ".");
    }
    // Check values types
    int columns = (int) getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
    }
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // Describe a null value as "null" instead of calling value.getClass(),
            // which would throw a misleading NullPointerException here.
            String actual = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + actual + ".");
        }
    }
    // Insert values — all types were validated above, so each cast is safe.
    for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[(int) columnIndex];
        switch(colTypes[(int) columnIndex]) {
            case ColumnTypeBool:
                nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean) value);
                break;
            case ColumnTypeInt:
                nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number) value).longValue());
                break;
            case ColumnTypeFloat:
                nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float) value).floatValue());
                break;
            case ColumnTypeDouble:
                nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double) value).doubleValue());
                break;
            case ColumnTypeString:
                nativeInsertString(nativePtr, columnIndex, rowIndex, (String) value);
                break;
            case ColumnTypeDate:
                // Native core stores dates as seconds since the epoch.
                nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date) value).getTime() / 1000);
                break;
            case ColumnTypeMixed:
                nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
                break;
            case ColumnTypeBinary:
                if (value instanceof byte[])
                    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[]) value);
                else if (value instanceof ByteBuffer)
                    nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer) value);
                break;
            case ColumnTypeTable:
                nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
                insertSubtableValues(rowIndex, columnIndex, value);
                break;
            default:
                throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int) columnIndex]));
        }
    }
    insertDone(); // commits the pending row in the native core
}
// Recursively fills a freshly inserted subtable cell from a nested array.
// A null value leaves the subtable empty. Assumes value is Object[] of rows,
// each row itself an Object[] — a wrong shape raises ClassCastException.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
    if (value != null) {
        // insert rows in subtable recursively
        Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
        int rows = ((Object[]) value).length;
        for (int i = 0; i < rows; ++i) {
            Object rowArr = ((Object[]) value)[i];
            subtable.insert(i, (Object[]) rowArr);
        }
    }
}
/**
 * Replaces the row at {@code rowIndex} with {@code values}.
 * All values are validated first, then the row is removed and re-inserted,
 * so a validation failure leaves the table untouched.
 *
 * @param rowIndex row to overwrite; must be &lt; size()
 * @param values   one value per column, in column order
 * @throws IllegalStateException    if the table is immutable (read transaction)
 * @throws IllegalArgumentException on index/arity/type mismatch
 */
public void set(long rowIndex, Object... values) {
    if (immutable)
        throwImmutable();
    // Check index
    long size = size();
    if (rowIndex >= size) {
        throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be < table.size() " + String.valueOf(size) + ".");
    }
    // Verify number of 'values'
    int columns = (int) getColumnCount();
    if (columns != values.length) {
        throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
    }
    // Verify type of 'values'
    ColumnType colTypes[] = new ColumnType[columns];
    for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
        Object value = values[columnIndex];
        ColumnType colType = getColumnType(columnIndex);
        colTypes[columnIndex] = colType;
        if (!colType.matchObject(value)) {
            // Describe a null value as "null" instead of calling value.getClass(),
            // which would throw a misleading NullPointerException here.
            String actual = (value == null) ? "null" : value.getClass().toString();
            throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + actual + ".");
        }
    }
    // Now that all values are verified, we can remove the row and insert it again.
    // TODO: Can be optimized to only set the values (but clear any subtables)
    remove(rowIndex);
    insert(rowIndex, values);
}
// --- Typed single-cell inserters ---------------------------------------------
// Each wrapper checks the immutable flag and forwards to its native twin.
// All of these participate in a pending row started by the insert* family and
// finished by insertDone().

public void insertLong(long columnIndex, long rowIndex, long value) {
    if (immutable)
        throwImmutable();
    nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);

public void insertFloat(long columnIndex, long rowIndex, float value) {
    if (immutable)
        throwImmutable();
    nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);

public void insertDouble(long columnIndex, long rowIndex, double value) {
    if (immutable)
        throwImmutable();
    nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);

public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
    if (immutable)
        throwImmutable();
    nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);

public void insertDate(long columnIndex, long rowIndex, Date date) {
    if (immutable)
        throwImmutable();
    // Native core stores dates as seconds since the epoch.
    nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}

protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);

public void insertString(long columnIndex, long rowIndex, String value) {
    if (immutable)
        throwImmutable();
    nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);

public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
    if (immutable)
        throwImmutable();
    nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}

protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);

// Only direct (allocateDirect) buffers are supported by the JNI layer.
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    //System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
    if (data.isDirect())
        nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        // FIXME: support other than allocateDirect
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}

protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);

// NOTE(review): this overload checks null before immutable while the ByteBuffer
// overload checks immutable first — harmless but inconsistent; consider aligning.
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
    if (data == null)
        throw new NullPointerException("Null Array");
    if (immutable)
        throwImmutable();
    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}

protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);

// Inserts a subtable cell and recursively fills it from the nested array.
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
    if (immutable)
        throwImmutable();
    nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
    insertSubtableValues(rowIndex, columnIndex, values);
}

protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);

// Commits the pending row built by the insert* calls above.
public void insertDone() {
    if (immutable)
        throwImmutable();
    nativeInsertDone(nativePtr);
}

protected native void nativeInsertDone(long nativeTablePtr);

//
// Getters
//
@Override
public long getLong(long columnIndex, long rowIndex) {
    return nativeGetLong(nativePtr, columnIndex, rowIndex);
}

protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);

@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
    return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}

protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);

@Override
public float getFloat(long columnIndex, long rowIndex) {
    return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}

protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);

@Override
public double getDouble(long columnIndex, long rowIndex) {
    return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}

protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);

// Converts the native epoch-seconds value back to a java.util.Date.
@Override
public Date getDate(long columnIndex, long rowIndex) {
    return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex) * 1000);
}

protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);

/**
 * Get the value of a (string )cell.
 *
 * @param columnIndex
 *            0 based index value of the column
 * @param rowIndex
 *            0 based index of the row.
 * @return value of the particular cell
 */
@Override
public String getString(long columnIndex, long rowIndex) {
    return nativeGetString(nativePtr, columnIndex, rowIndex);
}

protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);

/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex
 *            0 based index value of the cell column
 * @param rowIndex
 *            0 based index value of the cell row
 * @return value of the particular cell.
 */
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
    return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}

protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);

@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
    return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}

protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);

@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
    return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}

// Runtime type of the value stored in a mixed cell.
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
    return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}

protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);

protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);

/**
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let javas garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
    return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}

protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);

// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
    return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}

private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);

// Number of rows in the subtable cell, without materializing a wrapper.
public long getSubTableSize(long columnIndex, long rowIndex) {
    return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}

protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);

// Deletes all rows of the subtable cell.
public void clearSubTable(long columnIndex, long rowIndex) {
    if (immutable)
        throwImmutable();
    nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}

protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
// Each setter checks the immutable flag and forwards to its native twin.

@Override
public void setLong(long columnIndex, long rowIndex, long value) {
    if (immutable)
        throwImmutable();
    nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);

@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
    if (immutable)
        throwImmutable();
    nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);

@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
    if (immutable)
        throwImmutable();
    nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);

@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
    if (immutable)
        throwImmutable();
    nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);

@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
    if (immutable)
        throwImmutable();
    // Native core stores dates as seconds since the epoch.
    nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}

protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);

@Override
public void setString(long columnIndex, long rowIndex, String value) {
    if (immutable)
        throwImmutable();
    nativeSetString(nativePtr, columnIndex, rowIndex, value);
}

protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);

/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 *            the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 */
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException("Null array");
    if (data.isDirect())
        nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        // FIXME: support other than allocateDirect
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}

protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);

@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException("Null Array");
    nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}

protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);

/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 */
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
    if (immutable)
        throwImmutable();
    if (data == null)
        throw new NullPointerException();
    nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}

protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);

/**
 * Add the value for to all cells in the column.
 *
 * @param columnIndex column index of the cell
 * @param value
 */
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
    if (immutable)
        throwImmutable();
    nativeAddInt(nativePtr, columnIndex, value);
}

protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);

// Builds a search index on a string column (required by lookup()).
public void setIndex(long columnIndex) {
    if (immutable)
        throwImmutable();
    if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
        throw new IllegalArgumentException("Index is only supported on string columns.");
    nativeSetIndex(nativePtr, columnIndex);
}

protected native void nativeSetIndex(long nativePtr, long columnIndex);

public boolean hasIndex(long columnIndex) {
    return nativeHasIndex(nativePtr, columnIndex);
}

protected native boolean nativeHasIndex(long nativePtr, long columnIndex);

//
// Aggregate functions
//
// Thin wrappers over native column aggregates, one family per column type.

// Integers
@Override
public long sum(long columnIndex) {
    return nativeSum(nativePtr, columnIndex);
}

protected native long nativeSum(long nativePtr, long columnIndex);

@Override
public long maximum(long columnIndex) {
    return nativeMaximum(nativePtr, columnIndex);
}

protected native long nativeMaximum(long nativePtr, long columnIndex);

@Override
public long minimum(long columnIndex) {
    return nativeMinimum(nativePtr, columnIndex);
}

protected native long nativeMinimum(long nativePtr, long columnnIndex);

@Override
public double average(long columnIndex) {
    return nativeAverage(nativePtr, columnIndex);
}

protected native double nativeAverage(long nativePtr, long columnIndex);

// Floats
@Override
public double sumFloat(long columnIndex) {
    return nativeSumFloat(nativePtr, columnIndex);
}

protected native double nativeSumFloat(long nativePtr, long columnIndex);

@Override
public float maximumFloat(long columnIndex) {
    return nativeMaximumFloat(nativePtr, columnIndex);
}

protected native float nativeMaximumFloat(long nativePtr, long columnIndex);

@Override
public float minimumFloat(long columnIndex) {
    return nativeMinimumFloat(nativePtr, columnIndex);
}

protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);

@Override
public double averageFloat(long columnIndex) {
    return nativeAverageFloat(nativePtr, columnIndex);
}

protected native double nativeAverageFloat(long nativePtr, long columnIndex);

// Doubles
@Override
public double sumDouble(long columnIndex) {
    return nativeSumDouble(nativePtr, columnIndex);
}

protected native double nativeSumDouble(long nativePtr, long columnIndex);

@Override
public double maximumDouble(long columnIndex) {
    return nativeMaximumDouble(nativePtr, columnIndex);
}

protected native double nativeMaximumDouble(long nativePtr, long columnIndex);

@Override
public double minimumDouble(long columnIndex) {
    return nativeMinimumDouble(nativePtr, columnIndex);
}

protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);

@Override
public double averageDouble(long columnIndex) {
    return nativeAverageDouble(nativePtr, columnIndex);
}

protected native double nativeAverageDouble(long nativePtr, long columnIndex);

//
// Count
//
// Number of cells in the column equal to the given value, one overload per type.

public long count(long columnIndex, long value) {
    return nativeCountLong(nativePtr, columnIndex, value);
}

protected native long nativeCountLong(long nativePtr, long columnIndex, long value);

public long count(long columnIndex, float value) {
    return nativeCountFloat(nativePtr, columnIndex, value);
}

protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);

public long count(long columnIndex, double value) {
    return nativeCountDouble(nativePtr, columnIndex, value);
}

protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);

public long count(long columnIndex, String value) {
    return nativeCountString(nativePtr, columnIndex, value);
}

protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//

// Starts a query builder on this table; the immutable flag is propagated.
public TableQuery where() {
    return new TableQuery(nativeWhere(nativePtr), immutable);
}

protected native long nativeWhere(long nativeTablePtr);

// findFirst*: index of the first matching row (native return convention applies).

@Override
public long findFirstLong(long columnIndex, long value) {
    return nativeFindFirstInt(nativePtr, columnIndex, value);
}

protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);

@Override
public long findFirstBoolean(long columnIndex, boolean value) {
    return nativeFindFirstBool(nativePtr, columnIndex, value);
}

protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);

@Override
public long findFirstFloat(long columnIndex, float value) {
    return nativeFindFirstFloat(nativePtr, columnIndex, value);
}

protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);

@Override
public long findFirstDouble(long columnIndex, double value) {
    return nativeFindFirstDouble(nativePtr, columnIndex, value);
}

protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);

@Override
public long findFirstDate(long columnIndex, Date date) {
    // Native core compares dates as seconds since the epoch.
    return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}

protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);

@Override
public long findFirstString(long columnIndex, String value) {
    return nativeFindFirstString(nativePtr, columnIndex, value);
}

protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);

// findAll*: view of all matching rows; the immutable flag is propagated.

@Override
public TableView findAllLong(long columnIndex, long value) {
    return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}

protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);

@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
    return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}

protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);

@Override
public TableView findAllFloat(long columnIndex, float value) {
    return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}

protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);

@Override
public TableView findAllDouble(long columnIndex, double value) {
    return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}

protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);

@Override
public TableView findAllDate(long columnIndex, Date date) {
    return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}

protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);

@Override
public TableView findAllString(long columnIndex, String value) {
    return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}

protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);

// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
    if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
        throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
    return nativeLookup(nativePtr, value);
}

protected native long nativeLookup(long nativeTablePtr, String value);

// Experimental feature
// NOTE(review): presumably these mirror C++ lower_bound/upper_bound on a sorted
// integer column — confirm the sortedness precondition with the native core.
@Override
public long lowerBoundLong(long columnIndex, long value) {
    return nativeLowerBoundInt(nativePtr, columnIndex, value);
}

@Override
public long upperBoundLong(long columnIndex, long value) {
    return nativeUpperBoundInt(nativePtr, columnIndex, value);
}

protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);

protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);

//
// View of rows holding distinct values of the column; immutable flag propagated.
public TableView distinct(long columnIndex) {
    return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}

protected native long nativeDistinct(long nativePtr, long columnIndex);

// Optimize
public void optimize() {
    if (immutable)
        throwImmutable();
    nativeOptimize(nativePtr);
}

protected native void nativeOptimize(long nativeTablePtr);

// Serializes the whole table as JSON via the native core.
@Override
public String toJson() {
    return nativeToJson(nativePtr);
}

protected native String nativeToJson(long nativeTablePtr);

// Human-readable dump; INFINITE (-1) means no row limit.
@Override
public String toString() {
    return nativeToString(nativePtr, INFINITE);
}

@Override
public String toString(long maxRows) {
    return nativeToString(nativePtr, maxRows);
}

protected native String nativeToString(long nativeTablePtr, long maxRows);

@Override
public String rowToString(long rowIndex) {
    return nativeRowToString(nativePtr, rowIndex);
}

protected native String nativeRowToString(long nativeTablePtr, long rowIndex);

// Shared guard used by all mutators when this wrapper belongs to a read transaction.
private void throwImmutable() {
    throw new IllegalStateException("Mutable method call during read transaction.");
}
}
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
public class Table implements TableOrView, TableSchema {
// Sentinel passed to nativeToString meaning "no row limit".
public static final long INFINITE = -1;

// Pointer to the native table; 0 once closed/invalid.
protected long nativePtr;

// True inside a read transaction: every mutator throws via throwImmutable().
protected boolean immutable = false;

// test:
// Debug-only instrumentation for lifecycle tracing on stderr.
protected int tableNo;

protected boolean DEBUG = false;

protected static int TableCount = 0;

static {
    // Load the JNI library before any native method can be called.
    TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 */
public Table() {
    // Native methods work will be initialized here. Generated classes will
    // have nothing to do with the native functions. Generated Java Table
    // classes will work as a wrapper on top of table.
    nativePtr = createNative();
    if (nativePtr == 0)
        throw new OutOfMemoryError("Out of native memory.");
    if (DEBUG) {
        tableNo = ++TableCount;
        System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
    }
}

// Allocates the native table object; returns its pointer (0 on allocation failure).
protected native long createNative();

/**
 * Wraps an already-created native table (e.g. a subtable handle) in a Java Table.
 *
 * @param parent    owning object (not stored here; documents ownership at call sites)
 * @param nativePtr pointer to the existing native table
 * @param immutable true when obtained inside a read transaction; mutators will throw
 */
protected Table(Object parent, long nativePtr, boolean immutable) {
    this.immutable = immutable;
    this.nativePtr = nativePtr;
    if (DEBUG) {
        tableNo = ++TableCount;
        System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
    }
}

// Safety net: releases the native table if close() was never reached explicitly.
@Override
public void finalize() throws Throwable {
    if (DEBUG)
        System.err.println("==== FINALIZE " + tableNo + "...");
    try {
        close();
    } finally {
        super.finalize();
    }
}

// Releases the native table exactly once; later calls are no-ops (nativePtr == 0).
// Serialized on a process-wide mutex so finalizers and explicit closes cannot race.
private void close() {
    synchronized (CloseMutex.getInstance()) {
        if (nativePtr == 0) {
            if (DEBUG)
                System.err.println(".... CLOSE ignored.");
            return;
        }
        if (DEBUG) {
            TableCount--;
            System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
        }
        nativeClose(nativePtr);
        nativePtr = 0; // marks this wrapper as closed/invalid
    }
}

protected native void nativeClose(long nativeTablePtr);

/*
 * Check if the Table is valid.
 * Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
 * You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
 * The only method you can call is 'isValid()'.
 */
public boolean isValid() {
    if (nativePtr == 0)
        return false; // already closed
    return nativeIsValid(nativePtr);
}

protected native boolean nativeIsValid(long nativeTablePtr);

// Content-based equality delegated to the native core (not pointer identity).
// NOTE(review): equals() is overridden without a matching hashCode(); two equal
// Tables may hash differently — confirm Tables are never used as hash-map keys.
@Override
public boolean equals(Object other) {
    if (this == other)
        return true;
    if (other == null)
        return false;
    // Has to work for all the typed tables as well
    if (!(other instanceof Table))
        return false;
    Table otherTable = (Table) other;
    return nativeEquals(nativePtr, otherTable.nativePtr);
}

protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);

// Rejects column names longer than the native core's 63-character limit.
private void verifyColumnName(String name) {
    if (name.length() > 63) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
// Returns a definition handle for the given subtable column; only valid on a root table.
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
if (nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
// The path addresses the subtable column relative to this (root) table.
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableDefinition(nativePtr, newPath);
}
// Returns a schema handle for the given subtable column; only valid on a root table.
public TableSchema getSubTableSchema(long columnIndex) {
if (nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
 * Add a column to the table dynamically.
 *
 * @param type data type of the new column
 * @param name column name (max 63 characters)
 * @return Index of the new column.
 * @throws IllegalStateException if called during a read transaction (immutable table)
 */
@Override
public long addColumn(ColumnType type, String name) {
    // Schema changes mutate the table, so enforce the same immutability guard
    // used by every other mutating method in this class (updateFromSpec, clear, remove, ...).
    if (immutable)
        throwImmutable();
    verifyColumnName(name);
    return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
 * Remove a column in the table dynamically.
 *
 * @param columnIndex index of the column to remove
 * @throws IllegalStateException if called during a read transaction (immutable table)
 */
@Override
public void removeColumn(long columnIndex) {
    // Consistency fix: schema mutation must respect the immutability guard,
    // like the other mutators in this class.
    if (immutable)
        throwImmutable();
    nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table.
 *
 * @param columnIndex index of the column to rename
 * @param newName new column name (max 63 characters)
 * @throws IllegalStateException if called during a read transaction (immutable table)
 */
@Override
public void renameColumn(long columnIndex, String newName) {
    // Consistency fix: schema mutation must respect the immutability guard,
    // like the other mutators in this class.
    if (immutable)
        throwImmutable();
    verifyColumnName(newName);
    nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates this table's schema from a table specification structure.
 * Supported column types - refer to {@link ColumnType}.
 *
 * @param tableSpec
 *            specification describing the columns this table should have.
 *            Duplicate column names are not allowed.
 * @throws IllegalStateException if called during a read transaction (immutable table)
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable)
throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
 * Clears the table i.e., deleting all rows in the table.
 *
 * @throws IllegalStateException if called during a read transaction (immutable table)
 */
@Override
public void clear() {
if (immutable)
throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
// Snapshot of this table's column layout as a TableSpec value.
public TableSpec getTableSpec() {
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of the column with the given name by scanning
 * the column names in order.
 *
 * @param name column name
 * @return the column index, or -1 if no column has that name
 */
@Override
public long getColumnIndex(String name) {
    final long count = getColumnCount();
    long index = 0;
    while (index < count) {
        if (name.equals(getColumnName(index)))
            return index;
        index++;
    }
    // No match found.
    return -1;
}
/**
 * Get the type of a column identified by the columnIdex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
@Override
public ColumnType getColumnType(long columnIndex) {
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 *
 */
@Override
public void remove(long rowIndex) {
if (immutable)
throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
// Removes the last row of the table.
@Override
public void removeLast() {
if (immutable)
throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
 * EXPERIMENTAL function
 */
// NOTE(review): by its name this moves the last row over rowIndex (an order-destroying
// O(1) delete) — confirm semantics against the native binding.
public void moveLastOver(long rowIndex) {
if (immutable)
throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
// Appends one empty row; returns the value reported by the native layer
// (presumably the new row's index — confirm).
public long addEmptyRow() {
if (immutable)
throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
// Appends 'rows' empty rows; 'rows' must be positive.
public long addEmptyRows(long rows) {
if (immutable)
throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
// Appends a row built from 'values' (one value per column) and returns its index.
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Inserts a full row at rowIndex. All values are validated against the column
 * count and column types before anything is written, then each cell is inserted
 * via the matching native call and the row is committed with insertDone().
 *
 * NOTE(review): if a null value fails colType.matchObject(), the error-message
 * construction below calls value.getClass() and would NPE — confirm matchObject's
 * null handling.
 */
public void insert(long rowIndex, Object... values) {
if (immutable)
throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int) getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
}
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int) columnIndex];
switch(colTypes[(int) columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean) value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number) value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float) value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double) value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String) value);
break;
case ColumnTypeDate:
// Dates are stored natively as whole seconds; sub-second precision is dropped.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date) value).getTime() / 1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
// Both byte[] and direct ByteBuffer payloads are accepted for binary columns.
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[]) value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer) value);
break;
case ColumnTypeTable:
// Insert the subtable cell first, then fill it recursively.
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int) columnIndex]));
}
}
// Commit the partially-built row in the native layer.
insertDone();
}
// Recursively fills a freshly-inserted subtable cell: each element of 'value'
// (an Object[] of row arrays) becomes one row of the subtable. A null value
// leaves the subtable empty.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
    if (value == null)
        return;
    Object[] rowArrays = (Object[]) value;
    Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
    for (int row = 0; row < rowArrays.length; row++) {
        subtable.insert(row, (Object[]) rowArrays[row]);
    }
}
/**
 * Replaces the row at rowIndex with the given values. All values are validated
 * first; the row is then rewritten as remove + insert (see TODO below).
 *
 * NOTE(review): same potential NPE as insert() — value.getClass() in the error
 * message when a failing value is null.
 */
public void set(long rowIndex, Object... values) {
if (immutable)
throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) + " must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int) getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" + String.valueOf(values.length) + ") does not match the number of columns in the table (" + String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) + ". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
// Per-type cell insertion wrappers. Each checks the immutability guard and then
// forwards to the matching native call. Rows built this way must be finished
// with insertDone(). (The native declarations are interleaved in their original,
// slightly shuffled order — left untouched.)
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable)
throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable)
throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable)
throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable)
throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
// Dates are stored natively as whole seconds since the epoch; millisecond precision is dropped.
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable)
throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable)
throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable)
throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
// Only direct ByteBuffers are supported (the native layer reads the buffer's memory directly).
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
//System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
// FIXME: support other than allocateDirect
throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
// NOTE(review): this overload checks null before the immutability guard, unlike the
// ByteBuffer overload above — the check order is inconsistent between the two.
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable)
throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
// Inserts a subtable cell and fills it from 'values' (one Object[] per subtable row).
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable)
throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Commits a row assembled via the insert* calls above.
public void insertDone() {
if (immutable)
throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
// Typed cell readers: each forwards (columnIndex, rowIndex) to the matching native call.
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
// Native stores seconds since the epoch; convert back to milliseconds for java.util.Date.
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex) * 1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (string )cell.
 *
 * @param columnIndex
 *            0 based index value of the column
 * @param rowIndex
 *            0 based index of the row.
 * @return value of the particular cell
 */
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
 * Get the value of a (binary) cell.
 *
 * @param columnIndex
 *            0 based index value of the cell column
 * @param rowIndex
 *            0 based index value of the cell row
 * @return value of the particular cell.
 */
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
// Reports the runtime type stored in a mixed cell.
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
 *
 * Note: The subtable returned will have to be closed again after use.
 * You can let javas garbage collector handle that or better yet call close()
 * after use.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex row index of the cell
 * @return TableBase the subtable at the requested cell
 */
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
// Number of rows in the subtable cell, without materializing a Table wrapper.
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
// Deletes all rows of the subtable cell.
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable)
throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
// Typed cell writers: each enforces the immutability guard, then forwards to native.
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable)
throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable)
throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable)
throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable)
throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
// Dates are stored natively as whole seconds; millisecond precision is dropped.
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable)
throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable)
throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
 * Sets the value for a (binary) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 *            the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
 */
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
// FIXME: support other than allocateDirect
throw new RuntimeException("Currently ByteBuffer must be allocateDirect().");
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
 * Sets the value for a (mixed typed) cell.
 *
 * @param columnIndex
 *            column index of the cell
 * @param rowIndex
 *            row index of the cell
 * @param data
 */
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable)
throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
 * Add the value for to all cells in the column.
 *
 * @param columnIndex column index of the cell
 * @param value
 */
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable)
throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
// Creates a search index on a column; only supported for string columns.
public void setIndex(long columnIndex) {
if (immutable)
throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
// Reports whether a search index exists on the given column.
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Column-wide aggregates, one family per numeric column type; all delegate to native.
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
// Number of cells in the column equal to 'value', per value type.
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
// Starts a query builder over this table; the query inherits this table's immutability.
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
// findFirst*: row index of the first match, per value type.
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
// Dates compare at whole-second precision (see the storage convention above).
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
// findAll*: TableView of every matching row, per value type.
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
// Binary-search-style bounds over an integer column (native-side semantics).
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
// View of the rows with distinct values in the given column.
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
// Asks the native layer to optimize the table's internal storage.
public void optimize() {
if (immutable)
throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
// INFINITE (-1) means "render all rows".
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Shared failure path for every mutating method called on an immutable (read-transaction) table.
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
// ===== Merge-report marker: the "Safe" merge-tool variant of this file follows =====
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableSchema {
// Sentinel for toString(maxRows): render all rows.
public static final long INFINITE = -1;
// Pointer to the underlying native (C++) table; 0 once closed.
protected long nativePtr;
// True while inside a read transaction: all mutators then call throwImmutable().
protected boolean immutable = false;
// test:
// Debug instrumentation: per-instance number and global live-table count.
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
// Load the JNI library once per classloader before any native method is touched.
TightDB.loadLibrary();
}
/**
* Construct a Table base object. It can be used to register columns in this
* table. Registering into table is allowed only for empty tables. It
* creates a native reference of the object and keeps a reference to it.
*/
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
// Finalizer releases the native table if close() was never called explicitly.
// NOTE(review): finalize() is deprecated in modern Java (a Cleaner is
// preferred), but this matches the lifecycle model used throughout this file.
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
super.finalize();
}
}
// Release the native table exactly once. A process-wide mutex guards the
// close so concurrent finalizers / explicit closes cannot double-free;
// nativePtr == 0 marks an already-closed table and makes close() idempotent.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
// Zeroing the pointer is what prevents a second native free.
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
* Check if the Table is valid.
* Whenever a Table/subtable is changed/updated all its subtables are invalidated.
* You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
* The only method you can call is 'isValid()'.
*/
public boolean isValid(){
    // A closed table (null native pointer) is never valid; otherwise defer
    // to the native layer, which invalidates subtables of changed tables.
    return nativePtr != 0 && nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
// Value equality: two Table instances are equal when the native layer says
// the underlying tables are equal. Accepts any Table subclass so the typed
// generated tables compare correctly too.
// NOTE(review): equals() is overridden without hashCode(), so instances are
// not safe as hash-map keys; a consistent hashCode would need native support.
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Validate a column name before it is passed to the native layer.
 *
 * @param name proposed column name; must be non-null and at most 63 characters.
 * @throws IllegalArgumentException if the name is null or too long.
 */
private void verifyColumnName(String name) {
    // Fail with a clear message instead of an NPE from name.length().
    if (name == null) {
        throw new IllegalArgumentException("Column name must not be null.");
    }
    if (name.length() > 63) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
<<<<<<< MINE
public TableSchema getSubTableSchema(long columnIndex) {
=======
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
>>>>>>> YOURS
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
@Override
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
* Remove a column in the table dynamically.
*/
@Override
public void removeColumn(long columnIndex) {
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
* Rename a column in the table.
*/
@Override
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates this table's column layout from a TableSpec structure.
 * Supported types - refer to @see ColumnType.
 *
 * @param tableSpec
 *            specification describing the columns of this table.
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
* Get the number of entries/rows of this table.
*
* @return The number of rows.
*/
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
* Checks whether this table is empty or not.
*
* @return true if empty, otherwise false.
*/
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
* Clears the table i.e., deleting all rows in the table.
*/
@Override
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
* Returns the number of columns in the table.
*
* @return the number of columns.
*/
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Look up the 0-based index of the column with the given name.
 *
 * @param name column name to search for.
 * @return the column index, or -1 if no column has that name.
 */
@Override
public long getColumnIndex(String name) {
    final long count = getColumnCount();
    long index = 0;
    // Linear scan over the column names; tables have few columns.
    while (index < count) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
* Get the type of a column identified by the columnIndex.
*
* @param columnIndex index of the column.
* @return Type of the particular column.
*/
@Override
public ColumnType getColumnType(long columnIndex) {
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
* Removes a row from the specific index. As of now the entry is simply
* removed from the table.
*
* @param rowIndex the row index (starting with 0)
*
*/
@Override
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
@Override
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
* EXPERIMENTAL function
*/
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
/**
 * Append a single empty row; returns the native call's result (presumably
 * the new row's index -- TODO confirm against the native implementation).
 */
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
/**
 * Append 'rows' empty rows.
 *
 * @param rows number of rows to append; must be > 0.
 * @throws IllegalArgumentException if rows < 1.
 */
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Append a row with the given values (one value per column, in column order).
 *
 * @return the index of the newly added row.
 */
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Insert a row at 'rowIndex' with one value per column, in column order.
 * All values are type-checked before anything is written, so a bad argument
 * leaves the table unchanged.
 *
 * @param rowIndex position to insert at; must be <= size().
 * @param values   exactly one value per column, each compatible with its
 *                 column's type (see ColumnType.matchObject).
 * @throws IllegalArgumentException on a bad index, wrong value count, or a
 *         value incompatible with its column type.
 */
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// First pass: validate every value up front so the native inserts below
// cannot fail halfway through a row.
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
// NOTE(review): if 'value' were null and rejected by matchObject, the
// message below would NPE on value.getClass() -- presumably matchObject
// accepts null only where legal; TODO confirm.
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// Native layer stores seconds since the epoch; java.util.Date uses millis.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
// Binary cells accept either a byte[] or a ByteBuffer.
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
break;
case ColumnTypeTable:
// Subtable cell: create it, then recursively insert any nested rows.
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
// Commit the pending row to the native layer.
insertDone();
}
// Recursively fill a freshly inserted subtable cell. 'value' is expected to
// be an Object[][] (rows of cell values); null leaves the subtable empty.
// NOTE(review): the subtable handle is not closed here; its finalizer
// presumably reclaims the native reference -- TODO confirm.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
/**
 * Overwrite the row at 'rowIndex' with the given values (one per column).
 * Implemented as validate-then-remove-then-insert, so a validation error
 * leaves the existing row untouched.
 *
 * @param rowIndex row to replace; must be < size().
 * @param values   exactly one value per column, each compatible with its
 *                 column's type.
 * @throws IllegalArgumentException on a bad index, wrong value count, or an
 *         incompatible value.
 */
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
/**
 * Insert a binary cell from a ByteBuffer.
 *
 * @param data a direct buffer (ByteBuffer.allocateDirect); heap-backed
 *             buffers are currently rejected.
 * @throws NullPointerException if data is null.
 * @throws RuntimeException if the buffer is not direct.
 */
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    if (!data.isDirect())
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
    nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable) throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string )cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
*
* Note: The subtable returned will have to be closed again after use.
* You can let javas garbage collector handle that or better yet call close()
* after use.
*
* @param columnIndex column index of the cell
* @param rowIndex row index of the cell
* @return TableBase the subtable at the requested cell
*/
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Add the value for to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
    // Fast lookup is only available when column 0 is an indexed string column;
    // hasIndex is checked first so getColumnType is only consulted when needed.
    if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString) {
        throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
    }
    return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
/** Render the whole table (all rows) as a human-readable string. */
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
/** Render at most 'maxRows' rows of the table as a human-readable string. */
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
// Uniform failure for any mutating call made while this table is immutable
// (i.e. obtained inside a read transaction).
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableSchema {
// Sentinel passed to nativeToString() meaning "no row limit".
public static final long INFINITE = -1;
// Opaque pointer to the C++ table object; 0 means closed/invalid.
protected long nativePtr;
// True when the table is accessed inside a read transaction; mutators then throw.
protected boolean immutable = false;
// test:
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
TightDB.loadLibrary();
}
/**
* Construct a Table base object. It can be used to register columns in this
* table. Registering into table is allowed only for empty tables. It
* creates a native reference of the object and keeps a reference to it.
*/
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
// Wrap an already-created native table (used for subtables and views).
// 'parent' is accepted but not stored here -- presumably it keeps the owner
// reachable at call sites; TODO confirm intent.
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
// Safety net: release the native table if the caller forgot to do so.
// NOTE(review): finalize() is deprecated in modern Java; a Cleaner or explicit
// close() by callers is the preferred mechanism -- kept as-is since callers
// may rely on GC-driven cleanup.
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
// Always chain to Object.finalize(), even if close() throws.
super.finalize();
}
}
/**
 * Releases the underlying native table exactly once.
 *
 * Serialized on the process-wide CloseMutex so concurrent finalization and
 * explicit closing cannot double-free the native object. After the first
 * successful close, nativePtr is zeroed and later calls are no-ops.
 */
private void close() {
    synchronized (CloseMutex.getInstance()) {
        if (nativePtr != 0) {
            if (DEBUG) {
                TableCount--;
                System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
            }
            nativeClose(nativePtr);
            nativePtr = 0;
        } else if (DEBUG) {
            System.err.println(".... CLOSE ignored.");
        }
    }
}
protected native void nativeClose(long nativeTablePtr);
/*
 * Reports whether this Table handle may still be used.
 * Whenever a Table/subtable is changed/updated all of its subtables are
 * invalidated; any call other than isValid() on an invalidated table throws.
 * A closed handle (nativePtr == 0) is never valid, so the native check is
 * only consulted for live handles.
 */
public boolean isValid(){
    return nativePtr != 0 && nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
// Structural equality: delegates to the native layer, so two distinct Java
// wrappers around equal native tables compare equal.
// NOTE(review): no matching hashCode() override is visible in this chunk; if
// Table instances are ever used as hash keys the equals/hashCode contract is
// violated -- TODO confirm whether hashCode() exists elsewhere in the file.
@Override
public boolean equals(Object other) {
if (this == other)
return true;
if (other == null)
return false;
// Has to work for all the typed tables as well
if (!(other instanceof Table))
return false;
Table otherTable = (Table) other;
return nativeEquals(nativePtr, otherTable.nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Validates a column name before it is handed to the native layer.
 *
 * @throws NullPointerException     if {@code name} is null (previously this
 *                                  surfaced as a bare NPE from name.length()
 *                                  with no message)
 * @throws IllegalArgumentException if the name exceeds the 63-character limit
 */
private void verifyColumnName(String name) {
    // Fail fast with a descriptive message; the exception type (NPE) is
    // unchanged from the implicit behavior of the original code.
    if (name == null) {
        throw new NullPointerException("Column name must not be null.");
    }
    if (name.length() > 63) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
<<<<<<< MINE
public TableSchema getSubTableSchema(long columnIndex) {
=======
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
>>>>>>> YOURS
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
@Override
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
* Remove a column in the table dynamically.
*/
@Override
public void removeColumn(long columnIndex) {
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
* Rename a column in the table.
*/
@Override
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
* Updates a table specification from a Table specification structure.
* Supported types - refer to @see ColumnType.
*
* @param columnType
* data type of the column @see <code>ColumnType</code>
* @param columnName
* name of the column. Duplicate column name is not allowed.
*/
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
* Get the number of entries/rows of this table.
*
* @return The number of rows.
*/
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
* Checks whether this table is empty or not.
*
* @return true if empty, otherwise false.
*/
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
* Clears the table i.e., deleting all rows in the table.
*/
@Override
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
* Returns the number of columns in the table.
*
* @return the number of columns.
*/
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Looks up the 0-based index of the column with the given name by scanning
 * the column names in order.
 *
 * @param name column name to search for
 * @return the index of the first matching column, or -1 if no column matches
 */
@Override
public long getColumnIndex(String name) {
    final long columnCount = getColumnCount();
    long index = 0;
    while (index < columnCount) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
* Get the type of a column identified by the columnIdex.
*
* @param columnIndex index of the column.
* @return Type of the particular column.
*/
@Override
public ColumnType getColumnType(long columnIndex) {
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
* Removes a row from the specific index. As of now the entry is simply
* removed from the table.
*
* @param rowIndex the row index (starting with 0)
*
*/
@Override
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
@Override
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
* EXPERIMENTAL function
*/
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
// Appends a single empty row and returns the native call's result --
// presumably the new row's index; TODO confirm against the native layer.
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
// Appends 'rows' empty rows; rejects non-positive counts up front.
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
// Appends one row built from 'values' (one value per column) and returns
// its index. Validation is delegated to insert().
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Inserts one row at rowIndex, taking one value per column in column order.
 *
 * Validation happens in two passes before any native mutation: the index is
 * bounds-checked, the value count is checked against the column count, and
 * every value is type-checked against its column. Only then are the
 * per-column native insert calls issued, finishing with insertDone().
 * Keeping validation fully ahead of mutation means a bad argument cannot
 * leave a half-inserted row behind.
 */
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Cache the column types so the insert loop below does not re-query them.
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
// Any Number is accepted and narrowed/widened to long.
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// Stored as whole seconds since the epoch (millis / 1000).
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
// Two accepted binary representations; anything else is silently skipped
// here -- matchObject() above is presumably what rules other types out.
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
break;
case ColumnTypeTable:
// The subtable cell is created first, then filled recursively.
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
insertDone();
}
/**
 * Recursively fills the subtable cell at (columnIndex, rowIndex) from a
 * nested Object[][] of row values. A null value leaves the subtable empty.
 */
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
    if (value != null) {
        // The special "during insert" accessor is needed because the parent
        // row count is not updated until insertDone().
        Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
        Object[] subRows = (Object[]) value;
        for (int row = 0; row < subRows.length; row++) {
            subtable.insert(row, (Object[]) subRows[row]);
        }
    }
}
/**
 * Replaces an existing row at rowIndex with the given per-column values.
 *
 * All validation (index bounds, value count, value types) is performed
 * before any mutation; the row is then removed and re-inserted, so a
 * validation failure leaves the table untouched.
 */
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
//System.err.printf("\ninsertBinary(col %d, row %d, ByteBuffer)\n", columnIndex, rowIndex);
//System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
/**
 * Inserts a binary (byte[]) value at the given cell.
 *
 * Consistency fix: every other mutator in this class (including the
 * ByteBuffer overload of insertBinary) checks the immutable flag before
 * validating arguments; this overload checked null first. The checks are
 * reordered to match, so an immutable table now uniformly reports
 * IllegalStateException regardless of the data argument.
 *
 * @throws NullPointerException  if {@code data} is null
 * @throws IllegalStateException if the table is immutable (read transaction)
 */
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("Null Array");
    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string )cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
*
* Note: The subtable returned will have to be closed again after use.
* You can let javas garbage collector handle that or better yet call close()
* after use.
*
* @param columnIndex column index of the cell
* @param rowIndex row index of the cell
* @return TableBase the subtable at the requested cell
*/
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Add the value for to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
// Creates a search index on the given column. Only string columns are
// supported, enforced here before calling into the native layer.
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
// Reports whether the given column has a search index.
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
// Fast indexed lookup of 'value' in column 0; the precondition (indexed
// string column 0) is re-checked on every call before delegating to native.
@Override
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
Unstructured
package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of his choice will automatically inherit
* from this class by the tightdb-class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableSchema {
public static final long INFINITE = -1; // sentinel meaning "no row limit" for toString(maxRows)
protected long nativePtr; // handle to the native (C++) table; 0 after close()
protected boolean immutable = false; // true during read transactions; mutators then throw
// test:
protected int tableNo; // debug-only: per-instance id, assigned when DEBUG is on
protected boolean DEBUG = false; // enables lifecycle tracing to stderr
protected static int TableCount = 0; // debug-only: count of live Table instances
static {
// Ensure the native TightDB library is loaded before any native call is made.
TightDB.loadLibrary();
}
/**
 * Constructs a Table base object backed by a freshly created native table.
 * Registering columns is allowed only while the table is empty.
 *
 * @throws OutOfMemoryError when the native core cannot allocate the table
 */
public Table() {
// Native method work is initialized here. Generated classes have
// nothing to do with the native functions; generated Java Table
// classes work as a wrapper on top of this class.
nativePtr = createNative();
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
protected native long createNative();
/**
 * Wraps an existing native table pointer (e.g. a subtable handle).
 *
 * @param parent     owning object; kept only as a parameter here — NOTE(review):
 *                   it is not stored, so it does not prevent parent GC; confirm intended
 * @param nativePtr  native table handle to adopt
 * @param immutable  true when the table belongs to a read transaction
 */
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
// Release the native handle if the user never called close(); always
// chain to super.finalize() even if close() throws.
try {
close();
} finally {
super.finalize();
}
}
// Frees the native table exactly once. Guarded by a process-wide mutex so a
// finalizer thread and user code cannot race on the same handle; idempotent
// because nativePtr is zeroed after the native close.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
 * Check if the Table is valid.
 * Whenever a Table/subtable is changed/updated all its subtables are invalidated.
 * You can no longer perform any actions on an invalidated table, and if done
 * anyway, an exception is thrown. The only method you can call is 'isValid()'.
 */
public boolean isValid(){
// A closed table (nativePtr == 0) is never valid; otherwise ask the core.
if (nativePtr == 0)
return false;
return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
/**
 * Compares this table's contents with another table via the native core.
 * Works for generated typed tables as well, since they all extend Table.
 * NOTE(review): hashCode() is not overridden alongside equals() in this file;
 * instances are unsafe as hash-map keys — confirm whether that matters to callers.
 */
@Override
public boolean equals(Object other) {
    // Cheap identity fast path.
    if (other == this) {
        return true;
    }
    // A single instanceof test rejects both null and unrelated types.
    if (!(other instanceof Table)) {
        return false;
    }
    // Content comparison is delegated to the native layer.
    return nativeEquals(nativePtr, ((Table) other).nativePtr);
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
/**
 * Rejects column names exceeding the native layer's 63-character limit.
 *
 * @throws IllegalArgumentException when the name is too long
 */
private void verifyColumnName(String name) {
    final int maxNameLength = 63;
    if (name.length() <= maxNameLength) {
        return;
    }
    throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
}
<<<<<<< MINE
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
=======
public TableSchema getSubTableSchema(long columnIndex) {
>>>>>>> YOURS
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
 * Add a column to the table dynamically.
 *
 * @param type column data type, see {@link ColumnType}
 * @param name column name (max 63 characters)
 * @return Index of the new column.
 */
@Override
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
 * Remove a column in the table dynamically.
 *
 * @param columnIndex index of the column to remove
 */
@Override
public void removeColumn(long columnIndex) {
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
 * Rename a column in the table.
 *
 * @param columnIndex index of the column to rename
 * @param newName     new name (max 63 characters)
 */
@Override
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates the table's column layout from a {@link TableSpec} structure.
 * Supported column types: see {@link ColumnType}.
 *
 * @param tableSpec specification (column types and names) to apply;
 *                  duplicate column names are not allowed
 * @throws IllegalStateException if called during a read transaction
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
 * Get the number of entries/rows of this table.
 *
 * @return The number of rows.
 */
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
 * Checks whether this table is empty or not.
 *
 * @return true if empty, otherwise false.
 */
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
 * Clears the table i.e., deleting all rows in the table.
 *
 * @throws IllegalStateException if called during a read transaction
 */
@Override
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
 * Returns the number of columns in the table.
 *
 * @return the number of columns.
 */
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
/** Returns the table's column specification as reported by the native core. */
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
 * Returns the name of a column identified by columnIndex. Notice that the
 * index is zero based.
 *
 * @param columnIndex the column index
 * @return the name of the column
 */
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of the column with the given name.
 *
 * @param name column name to search for
 * @return the column index, or -1 when no column has that name
 */
@Override
public long getColumnIndex(String name) {
    final long totalColumns = getColumnCount();
    long position = 0;
    while (position < totalColumns) {
        // Keep `name` on the left so a null argument still throws NPE here.
        if (name.equals(getColumnName(position))) {
            return position;
        }
        position++;
    }
    return -1;
}
/**
 * Get the type of a column identified by the columnIndex.
 *
 * @param columnIndex index of the column.
 * @return Type of the particular column.
 */
@Override
public ColumnType getColumnType(long columnIndex) {
// Native reports an int code; map it back to the Java enum.
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
 * Removes a row from the specific index. As of now the entry is simply
 * removed from the table.
 *
 * @param rowIndex the row index (starting with 0)
 * @throws IllegalStateException if called during a read transaction
 */
@Override
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
/** Removes the last row of the table. */
@Override
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
 * EXPERIMENTAL function
 * Presumably replaces the row at rowIndex with the last row and shrinks the
 * table by one (O(1) delete that does not preserve order) — confirm against
 * the native core's move_last_over semantics.
 */
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.

/**
 * Appends a single empty row.
 *
 * @return the value reported by the native call (presumably the new row's index — confirm)
 */
public long addEmptyRow() {
    return addEmptyRows(1);
}

/**
 * Appends {@code rows} empty rows in one native call.
 *
 * @param rows number of rows to append; must be positive
 * @throws IllegalArgumentException when {@code rows} is less than 1
 * @throws IllegalStateException if called during a read transaction
 */
public long addEmptyRows(long rows) {
    if (immutable) throwImmutable();
    if (rows < 1)
        throw new IllegalArgumentException("'rows' must be > 0.");
    return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Appends a row built from {@code values} (one value per column, in column
 * order) and returns the index of the new row.
 */
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Inserts a row at {@code rowIndex} built from {@code values}, one value per
 * column in column order. All values are validated against the column types
 * BEFORE any native insert happens, so a bad argument cannot leave a
 * half-written row. Subtable columns accept a nested Object[][] of row values
 * (or null for an empty subtable).
 *
 * @param rowIndex position to insert at; must be &lt;= size()
 * @param values   one value per column, type-compatible with each column
 * @throws IllegalArgumentException on bad index, wrong value count, or type mismatch
 * @throws IllegalStateException if called during a read transaction
 */
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// First pass: validate every value and cache the column types so the
// second pass cannot fail halfway through the native inserts.
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// java.util.Date carries milliseconds; the core stores seconds.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
break;
case ColumnTypeTable:
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
// The native insert protocol requires a final commit call for the row.
insertDone();
}
// Recursively populates a freshly inserted subtable cell from a nested
// Object[][] (one Object[] per subtable row). A null value leaves the
// subtable empty.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
/**
 * Replaces the row at {@code rowIndex} with {@code values} (one value per
 * column, in column order). All values are validated before any mutation, so
 * a bad argument leaves the row untouched.
 *
 * @param rowIndex index of the row to overwrite; must be &lt; size()
 * @param values   one value per column, type-compatible with each column
 * @throws IllegalArgumentException on bad index, wrong value count, or type mismatch
 * @throws IllegalStateException if called during a read transaction
 */
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
/**
 * Inserts a binary value from a ByteBuffer. The buffer MUST be created with
 * {@code ByteBuffer.allocateDirect(len)}; heap buffers are rejected because
 * the native layer reads the buffer's direct address.
 *
 * @param columnIndex column of the cell
 * @param rowIndex    row of the cell
 * @param data        direct ByteBuffer holding the bytes to store
 * @throws NullPointerException  when {@code data} is null
 * @throws RuntimeException      when {@code data} is not a direct buffer
 * @throws IllegalStateException if called during a read transaction
 */
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("ByteBuffer is null");
    // Removed stale commented-out debug printf/println left over from development.
    if (data.isDirect())
        nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
    else
        throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
/**
 * Inserts a binary value from a byte array.
 *
 * @throws NullPointerException  when {@code data} is null
 * @throws IllegalStateException if called during a read transaction
 */
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
if (data == null)
throw new NullPointerException("Null Array");
if (immutable) throwImmutable();
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public Date getDate(long columnIndex, long rowIndex) {
// The native core stores seconds since the epoch; java.util.Date wants milliseconds.
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string )cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
*
* Note: The subtable returned will have to be closed again after use.
* You can let javas garbage collector handle that or better yet call close()
* after use.
*
* @param columnIndex column index of the cell
* @param rowIndex row index of the cell
* @return TableBase the subtable at the requested cell
*/
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Add the value for to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
/**
 * Creates a search index on the given column. Only String columns are
 * currently supported by the native core.
 *
 * @throws IllegalArgumentException when the column is not a String column
 * @throws IllegalStateException    if called during a read transaction
 */
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
/** Returns true when the given column has a search index. */
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}package com.tightdb;
import java.nio.ByteBuffer;
import java.util.Date;
import com.tightdb.internal.CloseMutex;
import com.tightdb.typed.TightDB;
/*
Add isEqual(Table)
*/
/**
* This class is a base class for all TightDB tables. The class supports all low
* level methods (define/insert/delete/update) a table has. All the native
* communications to the TightDB C++ library are also handled by this class.
*
* A user who wants to create a table of their own design will have it
* inherit from this class automatically, via the tightdb class generator.
*
* As an example, let's create a table to keep records of an employee in a
* company.
*
* For this purpose we will create a class named "employee" with an Entity
* annotation as follows.
*
* @DefineTable
* public class employee {
* String name;
* long age;
* boolean hired;
* byte[] imageData;
* }
*
* The tightdb class generator will generate classes relevant to the employee:
*
* 1. Employee.java: Represents one employee of the employee table i.e., a single row. Getter/setter
* methods are declared from which you will be able to set/get values
* for a particular employee.
* 2. EmployeeTable.java: Represents the class for storing a collection of employee i.e., a table
* of rows. The class is inherited from the TableBase class as described above.
* It has all the high level methods to manipulate Employee objects from the table.
* 3. EmployeeView.java: Represents view of the employee table i.e., result set of queries.
*
*
*/
public class Table implements TableOrView, TableSchema {
// Sentinel passed to nativeToString meaning "no row limit".
public static final long INFINITE = -1;
// Pointer to the native (C++) table object; 0 once close() has run.
protected long nativePtr;
// When true, every mutating method throws via throwImmutable()
// (instance belongs to a read transaction).
protected boolean immutable = false;
// test:
// Debug-only bookkeeping: per-instance id and live-instance counter.
protected int tableNo;
protected boolean DEBUG = false;
protected static int TableCount = 0;
static {
// Ensure the JNI library is loaded before any native method is touched.
TightDB.loadLibrary();
}
/**
 * Construct a Table base object. It can be used to register columns in this
 * table. Registering into table is allowed only for empty tables. It
 * creates a native reference of the object and keeps a reference to it.
 */
public Table() {
// Native methods work will be initialized here. Generated classes will
// have nothing to do with the native functions. Generated Java Table
// classes will work as a wrapper on top of table.
nativePtr = createNative();
// createNative() returns 0 when the native-side allocation failed.
if (nativePtr == 0)
throw new OutOfMemoryError("Out of native memory.");
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("====== New Tablebase " + tableNo + " : ptr = " + nativePtr);
}
}
// Allocates the native table object and returns its pointer (0 on failure).
protected native long createNative();
protected Table(Object parent, long nativePtr, boolean immutable) {
this.immutable = immutable;
this.nativePtr = nativePtr;
if (DEBUG) {
tableNo = ++TableCount;
System.err.println("===== New Tablebase(ptr) " + tableNo + " : ptr = " + nativePtr);
}
}
// Frees the native table when this wrapper is garbage collected.
// NOTE(review): relies on GC timing for release of native memory; code
// needing deterministic cleanup should arrange for close() to run earlier.
@Override
public void finalize() throws Throwable {
if (DEBUG) System.err.println("==== FINALIZE " + tableNo + "...");
try {
close();
} finally {
// Always chain to Object.finalize(), even if close() throws.
super.finalize();
}
}
// Releases the native table exactly once. Guarded by a global mutex so
// concurrent finalizers cannot race on the native pointer; nativePtr == 0
// marks an already-closed instance, making close() idempotent.
private void close() {
synchronized (CloseMutex.getInstance()) {
if (nativePtr == 0) {
if (DEBUG)
System.err.println(".... CLOSE ignored.");
return;
}
if (DEBUG) {
TableCount--;
System.err.println("==== CLOSE " + tableNo + " ptr= " + nativePtr + " remaining " + TableCount);
}
nativeClose(nativePtr);
// Zeroing the pointer is what makes a second close() a no-op.
nativePtr = 0;
}
}
protected native void nativeClose(long nativeTablePtr);
/*
* Check if the Table is valid.
* Whenever a Table/subtable is changed/updated all it's subtables are invalidated.
* You can no longer perform any actions on the table, and if done anyway, an exception is thrown.
* The only method you can call is 'isValid()'.
*/
public boolean isValid(){
if (nativePtr == 0)
return false;
return nativeIsValid(nativePtr);
}
protected native boolean nativeIsValid(long nativeTablePtr);
/**
 * Two Table instances are equal when the native core reports their
 * underlying tables as equal. Works for the generated typed tables as
 * well, since they are all Table subclasses.
 */
@Override
public boolean equals(Object other) {
    if (other == this) {
        return true;
    }
    // instanceof is false for null, so the explicit null test is folded in.
    if (other instanceof Table) {
        return nativeEquals(nativePtr, ((Table) other).nativePtr);
    }
    return false;
}
protected native boolean nativeEquals(long nativeTablePtr, long nativeTableToComparePtr);
// Guard used by the column-mutating methods: the native core currently
// caps column names at 63 characters.
private void verifyColumnName(String name) {
    final int maxLength = 63;
    if (name.length() > maxLength) {
        throw new IllegalArgumentException("Column names are currently limited to max 63 characters.");
    }
}
<<<<<<< MINE
@Override
public TableDefinition getSubTableDefinition(long columnIndex) {
=======
public TableSchema getSubTableSchema(long columnIndex) {
>>>>>>> YOURS
if(nativeIsRootTable(nativePtr) == false)
throw new UnsupportedOperationException("This is a subtable. Can only be called on root table.");
long[] newPath = new long[1];
newPath[0] = columnIndex;
return new SubTableSchema(nativePtr, newPath);
}
protected native boolean nativeIsRootTable(long nativeTablePtr);
/**
* Add a column to the table dynamically.
* @return Index of the new column.
*/
@Override
public long addColumn (ColumnType type, String name) {
verifyColumnName(name);
return nativeAddColumn(nativePtr, type.getValue(), name);
}
protected native long nativeAddColumn(long nativeTablePtr, int type, String name);
/**
* Remove a column in the table dynamically.
*/
@Override
public void removeColumn(long columnIndex) {
nativeRemoveColumn(nativePtr, columnIndex);
}
protected native void nativeRemoveColumn(long nativeTablePtr, long columnIndex);
/**
* Rename a column in the table.
*/
@Override
public void renameColumn(long columnIndex, String newName) {
verifyColumnName(newName);
nativeRenameColumn(nativePtr, columnIndex, newName);
}
protected native void nativeRenameColumn(long nativeTablePtr, long columnIndex, String name);
/**
 * Updates this table's column layout from a {@link TableSpec} structure.
 * The spec describes column names and types; for the supported types,
 * refer to {@link ColumnType}.
 *
 * @param tableSpec
 *            specification describing the columns of this table
 * @throws IllegalStateException if called during a read transaction
 */
public void updateFromSpec(TableSpec tableSpec) {
if (immutable) throwImmutable();
nativeUpdateFromSpec(nativePtr, tableSpec);
}
protected native void nativeUpdateFromSpec(long nativeTablePtr, TableSpec tableSpec);
// Table Size and deletion. AutoGenerated subclasses are nothing to do with this
// class.
/**
* Get the number of entries/rows of this table.
*
* @return The number of rows.
*/
@Override
public long size() {
return nativeSize(nativePtr);
}
protected native long nativeSize(long nativeTablePtr);
/**
* Checks whether this table is empty or not.
*
* @return true if empty, otherwise false.
*/
@Override
public boolean isEmpty() {
return size() == 0;
}
/**
* Clears the table i.e., deleting all rows in the table.
*/
@Override
public void clear() {
if (immutable) throwImmutable();
nativeClear(nativePtr);
}
protected native void nativeClear(long nativeTablePtr);
// Column Information.
/**
* Returns the number of columns in the table.
*
* @return the number of columns.
*/
@Override
public long getColumnCount() {
return nativeGetColumnCount(nativePtr);
}
protected native long nativeGetColumnCount(long nativeTablePtr);
public TableSpec getTableSpec(){
return nativeGetTableSpec(nativePtr);
}
protected native TableSpec nativeGetTableSpec(long nativeTablePtr);
/**
* Returns the name of a column identified by columnIndex. Notice that the
* index is zero based.
*
* @param columnIndex the column index
* @return the name of the column
*/
@Override
public String getColumnName(long columnIndex) {
return nativeGetColumnName(nativePtr, columnIndex);
}
protected native String nativeGetColumnName(long nativeTablePtr, long columnIndex);
/**
 * Returns the 0-based index of a column based on the name.
 *
 * @param name column name
 * @return the index, -1 if not found
 */
@Override
public long getColumnIndex(String name) {
    // Linear scan over the column names; tables have few columns, so no
    // lookup structure is maintained.
    final long count = getColumnCount();
    long index = 0;
    while (index < count) {
        if (name.equals(getColumnName(index))) {
            return index;
        }
        index++;
    }
    return -1;
}
/**
* Get the type of a column identified by the columnIdex.
*
* @param columnIndex index of the column.
* @return Type of the particular column.
*/
@Override
public ColumnType getColumnType(long columnIndex) {
return ColumnType.fromNativeValue(nativeGetColumnType(nativePtr, columnIndex));
}
protected native int nativeGetColumnType(long nativeTablePtr, long columnIndex);
/**
* Removes a row from the specific index. As of now the entry is simply
* removed from the table.
*
* @param rowIndex the row index (starting with 0)
*
*/
@Override
public void remove(long rowIndex) {
if (immutable) throwImmutable();
nativeRemove(nativePtr, rowIndex);
}
protected native void nativeRemove(long nativeTablePtr, long rowIndex);
@Override
public void removeLast() {
if (immutable) throwImmutable();
nativeRemoveLast(nativePtr);
}
protected native void nativeRemoveLast(long nativeTablePtr);
/**
* EXPERIMENTAL function
*/
public void moveLastOver(long rowIndex) {
if (immutable) throwImmutable();
nativeMoveLastOver(nativePtr, rowIndex);
}
protected native void nativeMoveLastOver(long nativeTablePtr, long rowIndex);
// Row Handling methods.
/**
 * Appends one empty row (all columns take their default values).
 *
 * @return value returned by nativeAddEmptyRow — presumably the index of
 *         the new row; TODO confirm against the native implementation
 * @throws IllegalStateException if called during a read transaction
 */
public long addEmptyRow() {
if (immutable) throwImmutable();
return nativeAddEmptyRow(nativePtr, 1);
}
/**
 * Appends {@code rows} empty rows in a single native call.
 *
 * @param rows number of rows to add; must be &gt; 0
 * @return value returned by nativeAddEmptyRow for the batch
 * @throws IllegalStateException    if called during a read transaction
 * @throws IllegalArgumentException if {@code rows} is less than 1
 */
public long addEmptyRows(long rows) {
if (immutable) throwImmutable();
if (rows < 1)
throw new IllegalArgumentException("'rows' must be > 0.");
return nativeAddEmptyRow(nativePtr, rows);
}
protected native long nativeAddEmptyRow(long nativeTablePtr, long rows);
/**
 * Appends a row built from {@code values} at the end of the table.
 *
 * @param values one value per column, in column order
 * @return the row index of the newly appended row
 */
public long add(Object... values) {
long rowIndex = size();
insert(rowIndex, values);
return rowIndex;
}
/**
 * Inserts a row at {@code rowIndex}, shifting later rows down.
 * All values are validated against the column types before the first
 * native insert call is made, so a validation error leaves the table
 * unmodified.
 *
 * @param rowIndex position of the new row; must be &lt;= size()
 * @param values   one value per column, in column order
 * @throws IllegalStateException    if called during a read transaction
 * @throws IllegalArgumentException if the index, the number of values, or
 *                                  any value's type does not match the table
 */
public void insert(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex > size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be <= table.size() " + String.valueOf(size) + ".");
}
// Check values types
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Cache each column's type while validating, so the insert loop below
// does not have to query the native side a second time.
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Insert values
// The per-column nativeInsert* calls plus the final insertDone() form
// one logical row insert; the order of these calls matters.
for (long columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[(int)columnIndex];
switch (colTypes[(int)columnIndex]) {
case ColumnTypeBool:
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, (Boolean)value);
break;
case ColumnTypeInt:
nativeInsertLong(nativePtr, columnIndex, rowIndex, ((Number)value).longValue());
break;
case ColumnTypeFloat:
nativeInsertFloat(nativePtr, columnIndex, rowIndex, ((Float)value).floatValue());
break;
case ColumnTypeDouble:
nativeInsertDouble(nativePtr, columnIndex, rowIndex, ((Double)value).doubleValue());
break;
case ColumnTypeString:
nativeInsertString(nativePtr, columnIndex, rowIndex, (String)value);
break;
case ColumnTypeDate:
// Dates are passed to core as seconds since the epoch.
nativeInsertDate(nativePtr, columnIndex, rowIndex, ((Date)value).getTime()/1000);
break;
case ColumnTypeMixed:
nativeInsertMixed(nativePtr, columnIndex, rowIndex, Mixed.mixedValue(value));
break;
case ColumnTypeBinary:
// matchObject above accepted the value, so it is byte[] or ByteBuffer.
if (value instanceof byte[])
nativeInsertByteArray(nativePtr, columnIndex, rowIndex, (byte[])value);
else if (value instanceof ByteBuffer)
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, (ByteBuffer)value);
break;
case ColumnTypeTable:
// Insert the subtable cell first, then fill it row by row.
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, value);
break;
default:
throw new RuntimeException("Unexpected columnType: " + String.valueOf(colTypes[(int)columnIndex]));
}
}
insertDone();
}
// Fills a just-inserted subtable cell. value is expected to be an array of
// rows (Object[] of Object[]); null leaves the subtable empty. Uses the
// "during insert" accessor because the parent row count is not updated
// until insertDone() runs.
private void insertSubtableValues(long rowIndex, long columnIndex, Object value) {
if (value != null) {
// insert rows in subtable recursively
Table subtable = getSubTableDuringInsert(columnIndex, rowIndex);
int rows = ((Object[])value).length;
for (int i=0; i<rows; ++i) {
Object rowArr = ((Object[])value)[i];
subtable.insert(i, (Object[])rowArr);
}
}
}
public void set(long rowIndex, Object... values) {
if (immutable) throwImmutable();
// Check index
long size = size();
if (rowIndex >= size) {
throw new IllegalArgumentException("rowIndex " + String.valueOf(rowIndex) +
" must be < table.size() " + String.valueOf(size) + ".");
}
// Verify number of 'values'
int columns = (int)getColumnCount();
if (columns != values.length) {
throw new IllegalArgumentException("The number of value parameters (" +
String.valueOf(values.length) +
") does not match the number of columns in the table (" +
String.valueOf(columns) + ").");
}
// Verify type of 'values'
ColumnType colTypes[] = new ColumnType[columns];
for (int columnIndex = 0; columnIndex < columns; columnIndex++) {
Object value = values[columnIndex];
ColumnType colType = getColumnType(columnIndex);
colTypes[columnIndex] = colType;
if (!colType.matchObject(value)) {
throw new IllegalArgumentException("Invalid argument no " + String.valueOf(1 + columnIndex) +
". Expected a value compatible with column type " + colType + ", but got " + value.getClass() + ".");
}
}
// Now that all values are verified, we can remove the row and insert it again.
// TODO: Can be optimized to only set the values (but clear any subtables)
remove(rowIndex);
insert(rowIndex, values);
}
public void insertLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeInsertLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
public void insertFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeInsertFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
public void insertDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeInsertDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
public void insertBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeInsertBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
public void insertDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeInsertDate(nativePtr, columnIndex, rowIndex, date.getTime()/1000);
}
protected native void nativeInsertDate(long nativePtr, long columnIndex, long rowIndex, long dateTimeValue);
public void insertString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeInsertString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeInsertString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
public void insertMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
nativeInsertMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed mixed);
public void insertBinary(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("ByteBuffer is null");
//System.err.printf("\ninsertBinary(col %d, row %d, ByteBuffer)\n", columnIndex, rowIndex);
//System.err.println("-- HasArray: " + (data.hasArray() ? "yes":"no") + " len= " + data.array().length);
if (data.isDirect())
nativeInsertByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeInsertByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
/**
 * Inserts a binary value (byte array) at the given cell during a row insert.
 *
 * @param columnIndex column index of the cell
 * @param rowIndex    row index of the cell
 * @param data        the bytes to store; must not be null
 * @throws IllegalStateException if called during a read transaction
 * @throws NullPointerException  if {@code data} is null
 */
public void insertBinary(long columnIndex, long rowIndex, byte[] data) {
    // Consistency fix: check immutability first, as every other mutating
    // method in this class (including the ByteBuffer overload) does, so a
    // read-transaction misuse reports IllegalStateException regardless of
    // the argument values.
    if (immutable) throwImmutable();
    if (data == null)
        throw new NullPointerException("Null Array");
    nativeInsertByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeInsertByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
public void insertSubTable(long columnIndex, long rowIndex, Object[][] values) {
if (immutable) throwImmutable();
nativeInsertSubTable(nativePtr, columnIndex, rowIndex);
insertSubtableValues(rowIndex, columnIndex, values);
}
protected native void nativeInsertSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
public void insertDone() {
if (immutable) throwImmutable();
nativeInsertDone(nativePtr);
}
protected native void nativeInsertDone(long nativeTablePtr);
//
// Getters
//
@Override
public long getLong(long columnIndex, long rowIndex) {
return nativeGetLong(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetLong(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public boolean getBoolean(long columnIndex, long rowIndex) {
return nativeGetBoolean(nativePtr, columnIndex, rowIndex);
}
protected native boolean nativeGetBoolean(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public float getFloat(long columnIndex, long rowIndex) {
return nativeGetFloat(nativePtr, columnIndex, rowIndex);
}
protected native float nativeGetFloat(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public double getDouble(long columnIndex, long rowIndex) {
return nativeGetDouble(nativePtr, columnIndex, rowIndex);
}
protected native double nativeGetDouble(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public Date getDate(long columnIndex, long rowIndex) {
return new Date(nativeGetDateTime(nativePtr, columnIndex, rowIndex)*1000);
}
protected native long nativeGetDateTime(long nativeTablePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (string )cell.
*
* @param columnIndex
* 0 based index value of the column
* @param rowIndex
* 0 based index of the row.
* @return value of the particular cell
*/
@Override
public String getString(long columnIndex, long rowIndex) {
return nativeGetString(nativePtr, columnIndex, rowIndex);
}
protected native String nativeGetString(long nativePtr, long columnIndex, long rowIndex);
/**
* Get the value of a (binary) cell.
*
* @param columnIndex
* 0 based index value of the cell column
* @param rowIndex
* 0 based index value of the cell row
* @return value of the particular cell.
*/
@Override
public ByteBuffer getBinaryByteBuffer(long columnIndex, long rowIndex) {
return nativeGetByteBuffer(nativePtr, columnIndex, rowIndex);
}
protected native ByteBuffer nativeGetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex);
@Override
public byte[] getBinaryByteArray(long columnIndex, long rowIndex) {
return nativeGetByteArray(nativePtr, columnIndex, rowIndex);
}
protected native byte[] nativeGetByteArray(long nativePtr, long columnIndex, long rowIndex);
@Override
public Mixed getMixed(long columnIndex, long rowIndex) {
return nativeGetMixed(nativePtr, columnIndex, rowIndex);
}
@Override
public ColumnType getMixedType(long columnIndex, long rowIndex) {
return ColumnType.fromNativeValue(nativeGetMixedType(nativePtr, columnIndex, rowIndex));
}
protected native int nativeGetMixedType(long nativePtr, long columnIndex, long rowIndex);
protected native Mixed nativeGetMixed(long nativeTablePtr, long columnIndex, long rowIndex);
/**
*
* Note: The subtable returned will have to be closed again after use.
* You can let javas garbage collector handle that or better yet call close()
* after use.
*
* @param columnIndex column index of the cell
* @param rowIndex row index of the cell
* @return TableBase the subtable at the requested cell
*/
@Override
public Table getSubTable(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTable(nativePtr, columnIndex, rowIndex), immutable);
}
protected native long nativeGetSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
// Below version will allow to getSubTable when number of available rows are not updated yet -
// which happens before an insertDone().
private Table getSubTableDuringInsert(long columnIndex, long rowIndex) {
return new Table(this, nativeGetSubTableDuringInsert(nativePtr, columnIndex, rowIndex), immutable);
}
private native long nativeGetSubTableDuringInsert(long nativeTablePtr, long columnIndex, long rowIndex);
public long getSubTableSize(long columnIndex, long rowIndex) {
return nativeGetSubTableSize(nativePtr, columnIndex, rowIndex);
}
protected native long nativeGetSubTableSize(long nativeTablePtr, long columnIndex, long rowIndex);
public void clearSubTable(long columnIndex, long rowIndex) {
if (immutable) throwImmutable();
nativeClearSubTable(nativePtr, columnIndex, rowIndex);
}
protected native void nativeClearSubTable(long nativeTablePtr, long columnIndex, long rowIndex);
//
// Setters
//
@Override
public void setLong(long columnIndex, long rowIndex, long value) {
if (immutable) throwImmutable();
nativeSetLong(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetLong(long nativeTablePtr, long columnIndex, long rowIndex, long value);
@Override
public void setBoolean(long columnIndex, long rowIndex, boolean value) {
if (immutable) throwImmutable();
nativeSetBoolean(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetBoolean(long nativeTablePtr, long columnIndex, long rowIndex, boolean value);
@Override
public void setFloat(long columnIndex, long rowIndex, float value) {
if (immutable) throwImmutable();
nativeSetFloat(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetFloat(long nativeTablePtr, long columnIndex, long rowIndex, float value);
@Override
public void setDouble(long columnIndex, long rowIndex, double value) {
if (immutable) throwImmutable();
nativeSetDouble(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetDouble(long nativeTablePtr, long columnIndex, long rowIndex, double value);
@Override
public void setDate(long columnIndex, long rowIndex, Date date) {
if (immutable) throwImmutable();
nativeSetDate(nativePtr, columnIndex, rowIndex, date.getTime() / 1000);
}
protected native void nativeSetDate(long nativeTablePtr, long columnIndex, long rowIndex, long dateTimeValue);
@Override
public void setString(long columnIndex, long rowIndex, String value) {
if (immutable) throwImmutable();
nativeSetString(nativePtr, columnIndex, rowIndex, value);
}
protected native void nativeSetString(long nativeTablePtr, long columnIndex, long rowIndex, String value);
/**
* Sets the value for a (binary) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
* the ByteBuffer must be allocated with ByteBuffer.allocateDirect(len)
*/
@Override
public void setBinaryByteBuffer(long columnIndex, long rowIndex, ByteBuffer data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null array");
if (data.isDirect())
nativeSetByteBuffer(nativePtr, columnIndex, rowIndex, data);
else
throw new RuntimeException("Currently ByteBuffer must be allocateDirect()."); // FIXME: support other than allocateDirect
}
protected native void nativeSetByteBuffer(long nativeTablePtr, long columnIndex, long rowIndex, ByteBuffer data);
@Override
public void setBinaryByteArray(long columnIndex, long rowIndex, byte[] data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException("Null Array");
nativeSetByteArray(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetByteArray(long nativePtr, long columnIndex, long rowIndex, byte[] data);
/**
* Sets the value for a (mixed typed) cell.
*
* @param columnIndex
* column index of the cell
* @param rowIndex
* row index of the cell
* @param data
*/
@Override
public void setMixed(long columnIndex, long rowIndex, Mixed data) {
if (immutable) throwImmutable();
if (data == null)
throw new NullPointerException();
nativeSetMixed(nativePtr, columnIndex, rowIndex, data);
}
protected native void nativeSetMixed(long nativeTablePtr, long columnIndex, long rowIndex, Mixed data);
/**
* Add the value for to all cells in the column.
*
* @param columnIndex column index of the cell
* @param value
*/
//!!!TODO: New. Support in highlevel API
@Override
public void addLong(long columnIndex, long value) {
if (immutable) throwImmutable();
nativeAddInt(nativePtr, columnIndex, value);
}
protected native void nativeAddInt(long nativeViewPtr, long columnIndex, long value);
public void setIndex(long columnIndex) {
if (immutable) throwImmutable();
if (getColumnType(columnIndex) != ColumnType.ColumnTypeString)
throw new IllegalArgumentException("Index is only supported on string columns.");
nativeSetIndex(nativePtr, columnIndex);
}
protected native void nativeSetIndex(long nativePtr, long columnIndex);
public boolean hasIndex(long columnIndex) {
return nativeHasIndex(nativePtr, columnIndex);
}
protected native boolean nativeHasIndex(long nativePtr, long columnIndex);
//
// Aggregate functions
//
// Integers
@Override
public long sum(long columnIndex) {
return nativeSum(nativePtr, columnIndex);
}
protected native long nativeSum(long nativePtr, long columnIndex);
@Override
public long maximum(long columnIndex) {
return nativeMaximum(nativePtr, columnIndex);
}
protected native long nativeMaximum(long nativePtr, long columnIndex);
@Override
public long minimum(long columnIndex) {
return nativeMinimum(nativePtr, columnIndex);
}
protected native long nativeMinimum(long nativePtr, long columnnIndex);
@Override
public double average(long columnIndex) {
return nativeAverage(nativePtr, columnIndex);
}
protected native double nativeAverage(long nativePtr, long columnIndex);
// Floats
@Override
public double sumFloat(long columnIndex) {
return nativeSumFloat(nativePtr, columnIndex);
}
protected native double nativeSumFloat(long nativePtr, long columnIndex);
@Override
public float maximumFloat(long columnIndex) {
return nativeMaximumFloat(nativePtr, columnIndex);
}
protected native float nativeMaximumFloat(long nativePtr, long columnIndex);
@Override
public float minimumFloat(long columnIndex) {
return nativeMinimumFloat(nativePtr, columnIndex);
}
protected native float nativeMinimumFloat(long nativePtr, long columnnIndex);
@Override
public double averageFloat(long columnIndex) {
return nativeAverageFloat(nativePtr, columnIndex);
}
protected native double nativeAverageFloat(long nativePtr, long columnIndex);
// Doubles
@Override
public double sumDouble(long columnIndex) {
return nativeSumDouble(nativePtr, columnIndex);
}
protected native double nativeSumDouble(long nativePtr, long columnIndex);
@Override
public double maximumDouble(long columnIndex) {
return nativeMaximumDouble(nativePtr, columnIndex);
}
protected native double nativeMaximumDouble(long nativePtr, long columnIndex);
@Override
public double minimumDouble(long columnIndex) {
return nativeMinimumDouble(nativePtr, columnIndex);
}
protected native double nativeMinimumDouble(long nativePtr, long columnnIndex);
@Override
public double averageDouble(long columnIndex) {
return nativeAverageDouble(nativePtr, columnIndex);
}
protected native double nativeAverageDouble(long nativePtr, long columnIndex);
//
// Count
//
public long count(long columnIndex, long value) {
return nativeCountLong(nativePtr, columnIndex, value);
}
protected native long nativeCountLong(long nativePtr, long columnIndex, long value);
public long count(long columnIndex, float value) {
return nativeCountFloat(nativePtr, columnIndex, value);
}
protected native long nativeCountFloat(long nativePtr, long columnIndex, float value);
public long count(long columnIndex, double value) {
return nativeCountDouble(nativePtr, columnIndex, value);
}
protected native long nativeCountDouble(long nativePtr, long columnIndex, double value);
public long count(long columnIndex, String value) {
return nativeCountString(nativePtr, columnIndex, value);
}
protected native long nativeCountString(long nativePtr, long columnIndex, String value);
//
// Searching methods.
//
public TableQuery where() {
return new TableQuery(nativeWhere(nativePtr), immutable);
}
protected native long nativeWhere(long nativeTablePtr);
@Override
public long findFirstLong(long columnIndex, long value) {
return nativeFindFirstInt(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstInt(long nativeTablePtr, long columnIndex, long value);
@Override
public long findFirstBoolean(long columnIndex, boolean value) {
return nativeFindFirstBool(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstBool(long nativePtr, long columnIndex, boolean value);
@Override
public long findFirstFloat(long columnIndex, float value) {
return nativeFindFirstFloat(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstFloat(long nativePtr, long columnIndex, float value);
@Override
public long findFirstDouble(long columnIndex, double value) {
return nativeFindFirstDouble(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstDouble(long nativePtr, long columnIndex, double value);
@Override
public long findFirstDate(long columnIndex, Date date) {
return nativeFindFirstDate(nativePtr, columnIndex, date.getTime() / 1000);
}
protected native long nativeFindFirstDate(long nativeTablePtr, long columnIndex, long dateTimeValue);
@Override
public long findFirstString(long columnIndex, String value) {
return nativeFindFirstString(nativePtr, columnIndex, value);
}
protected native long nativeFindFirstString(long nativeTablePtr, long columnIndex, String value);
@Override
public TableView findAllLong(long columnIndex, long value) {
return new TableView(nativeFindAllInt(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllInt(long nativePtr, long columnIndex, long value);
@Override
public TableView findAllBoolean(long columnIndex, boolean value) {
return new TableView(nativeFindAllBool(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllBool(long nativePtr, long columnIndex, boolean value);
@Override
public TableView findAllFloat(long columnIndex, float value) {
return new TableView(nativeFindAllFloat(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllFloat(long nativePtr, long columnIndex, float value);
@Override
public TableView findAllDouble(long columnIndex, double value) {
return new TableView(nativeFindAllDouble(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllDouble(long nativePtr, long columnIndex, double value);
@Override
public TableView findAllDate(long columnIndex, Date date) {
return new TableView(nativeFindAllDate(nativePtr, columnIndex, date.getTime() / 1000), immutable);
}
protected native long nativeFindAllDate(long nativePtr, long columnIndex, long dateTimeValue);
@Override
public TableView findAllString(long columnIndex, String value) {
return new TableView(nativeFindAllString(nativePtr, columnIndex, value), immutable);
}
protected native long nativeFindAllString(long nativePtr, long columnIndex, String value);
// Requires that the first column is a string column with index
@Override
public long lookup(String value) {
if (!this.hasIndex(0) || this.getColumnType(0) != ColumnType.ColumnTypeString)
throw new RuntimeException("lookup() requires index on column 0 which must be a String column.");
return nativeLookup(nativePtr, value);
}
protected native long nativeLookup(long nativeTablePtr, String value);
// Experimental feature
@Override
public long lowerBoundLong(long columnIndex, long value) {
return nativeLowerBoundInt(nativePtr, columnIndex, value);
}
@Override
public long upperBoundLong(long columnIndex, long value) {
return nativeUpperBoundInt(nativePtr, columnIndex, value);
}
protected native long nativeLowerBoundInt(long nativePtr, long columnIndex, long value);
protected native long nativeUpperBoundInt(long nativePtr, long columnIndex, long value);
//
public TableView distinct(long columnIndex) {
return new TableView(nativeDistinct(nativePtr, columnIndex), immutable);
}
protected native long nativeDistinct(long nativePtr, long columnIndex);
// Optimize
public void optimize() {
if (immutable) throwImmutable();
nativeOptimize(nativePtr);
}
protected native void nativeOptimize(long nativeTablePtr);
@Override
public String toJson() {
return nativeToJson(nativePtr);
}
protected native String nativeToJson(long nativeTablePtr);
@Override
public String toString() {
return nativeToString(nativePtr, INFINITE);
}
@Override
public String toString(long maxRows) {
return nativeToString(nativePtr, maxRows);
}
protected native String nativeToString(long nativeTablePtr, long maxRows);
@Override
public String rowToString(long rowIndex) {
return nativeRowToString(nativePtr, rowIndex);
}
protected native String nativeRowToString(long nativeTablePtr, long rowIndex);
private void throwImmutable() {
throw new IllegalStateException("Mutable method call during read transaction.");
}
}
Diff Result
No diff
Case 22 - retrofit.rev_941ae_2ef7c.RestAdapter.java
private RestAdapter
Left modified the constructor signature, changing the type of the headers parameter: Headers → HeaderPairs
Unstructured reported conflict including signature and body
Safe kept both versions
MergeMethods reported a conflict spanning the whole method
KeepBothMethods kept both versions
Base
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
private final Headers headers;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, Headers headers, Converter converter, Profiler profiler, Log log,
boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> type) {
if (!type.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(type.getClassLoader(), new Class<?>[] { type },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.setApiUrl(serverUrl)
.setArgs(args)
.setHeaders(headers.get())
.setMethodInfo(methodDetails)
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.type;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (Header header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (Header header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.restMethod.value(), serverUrl,
methodDetails.path, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
private Headers headers;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(Headers headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, headers,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
if (headers == null) {
headers = Headers.NONE;
}
}
}
}
Right (label missing in the report — this second, unlabeled listing is byte-identical to Base and still uses Headers; presumably the Right side, verify against the original report)
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
private final Headers headers;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, Headers headers, Converter converter, Profiler profiler, Log log,
boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> type) {
if (!type.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(type.getClassLoader(), new Class<?>[] { type },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.setApiUrl(serverUrl)
.setArgs(args)
.setHeaders(headers.get())
.setMethodInfo(methodDetails)
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.type;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (Header header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (Header header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.restMethod.value(), serverUrl,
methodDetails.path, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
private Headers headers;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(Headers headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, headers,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
if (headers == null) {
headers = Headers.NONE;
}
}
}
}
Left
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
private final HeaderPairs headers;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, HeaderPairs headers, Converter converter, Profiler profiler,
Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> type) {
if (!type.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(type.getClassLoader(), new Class<?>[] { type },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.setApiUrl(serverUrl)
.setArgs(args)
.setHeaders(headers.get())
.setMethodInfo(methodDetails)
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.type;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.restMethod.value(), serverUrl,
methodDetails.path, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
private HeaderPairs headers;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, headers,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
if (headers == null) {
headers = HeaderPairs.NONE;
}
}
}
}
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
private final HeaderPairs headers;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, HeaderPairs headers, Converter converter, Profiler profiler,
Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> type) {
if (!type.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(type.getClassLoader(), new Class<?>[] { type },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.setApiUrl(serverUrl)
.setArgs(args)
.setHeaders(headers.get())
.setMethodInfo(methodDetails)
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.type;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.restMethod.value(), serverUrl,
methodDetails.path, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
private HeaderPairs headers;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, headers,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
if (headers == null) {
headers = HeaderPairs.NONE;
}
}
}
}
Right
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Adapts a Java interface to a REST API.
* <p>
* API endpoints are defined as methods on an interface with annotation providing metadata about
* the form in which the HTTP call should be made.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The built-in methods are {@link GET}, {@link PUT}, {@link POST}, {@link HEAD},
* and {@link DELETE}. You can define your own HTTP method by creating an annotation that takes a
* {code String} value and itself is annotated with {@link RestMethod @RestMethod}.
* <p>
* Method parameters can be used to replace parts of the URL by annotating them with {@link Path}.
* Replacement sections are denoted by an identifier surrounded by curly braces (e.g., "{foo}").
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified {@link Converter}.
* </ul>
* <p>
* An parameter which will represents the body of the request using the {@link Body} annotation.
* The object will be converted to request representation by a call to
* {@link Converter#toBody(Object) toBody} on the supplied {@link Converter} for this instance. The
* body can also be a {@link TypedOutput} where it will be used directly.
* <p>
* Alternative request body formats are supported by method annotations and corresponding parameter
* annotations:
* <ul>
* <li>{@link FormEncoded @FormEncoded} - Form-encoded data with pairs specified by the
* {@link Pair @Pair} parameter annotation.
* <li>{@link Multipart @Multipart} - RFC 2387-compliant multi-part data with parts specified by
* the {@link Part @Part} parameter annotation.
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("/category/{cat}") // Asynchronous execution.
* void categoryList(@Path("cat") String a, @Query("page") int b, Callback<List<Item>> cb);
* @POST("/category/{cat}") // Synchronous execution.
* List<Item> categoryList(@Path("cat") String a, @Query("page") int b);
* }
* </pre>
* <p>
* Calling {@link #create(Class)} with {@code MyApi.class} will validate and create a new
* implementation of the API.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
// Log output is emitted in chunks of at most this many characters (see the
// substring loops in logAndReplaceRequest / logAndReplaceResponse).
private static final int LOG_CHUNK_SIZE = 4000;
// Thread names applied to the HTTP executor thread while a request is in
// flight (prefix + relative URL) and once it has completed (idle).
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
// Collaborators are supplied once by the Builder and never change. Only the
// debug flag is mutable; it is volatile so setDebug is visible to the threads
// that execute requests.
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
/** Instantiated only through {@link Builder}, which validates and defaults the arguments. */
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
Profiler profiler, Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.requestHeaders = requestHeaders;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/** Create an implementation of the API defined by the specified {@code service} interface. */
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
// The returned proxy routes every interface call through RestHandler.invoke.
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
/** {@link InvocationHandler} backing the proxies returned by {@link #create(Class)}. */
private class RestHandler implements InvocationHandler {
// Lazily-parsed metadata per interface method; access is guarded by
// synchronizing on the map itself.
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
// Synchronous methods execute on the caller's thread and return the result directly.
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
// Asynchronous methods take the Callback as their last argument.
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
// Give the profiler, if configured, a chance to capture per-call state and timing.
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
// Non-2XX status: buffer the body and surface the response as an HTTP error.
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
// Restore the idle name so thread dumps don't show a stale request URL.
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (Header header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
// Buffer the body into a byte[] so it can be both logged and re-sent.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (Header header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body so we can log it and replace the original response.
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
/** Builds the profiler metadata describing an outgoing request. */
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
/** API server base URL as a string; convenience overload of {@link #setServer(Server)}. */
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
/** Wraps a fixed {@link Client} in a provider; convenience overload of {@link #setClient(Client.Provider)}. */
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
// Fill in any collaborator the caller did not set with the platform default.
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
}
}
}
}
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Adapts a Java interface to a REST API.
* <p>
* API endpoints are defined as methods on an interface with annotation providing metadata about
* the form in which the HTTP call should be made.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The built-in methods are {@link GET}, {@link PUT}, {@link POST}, {@link HEAD},
* and {@link DELETE}. You can define your own HTTP method by creating an annotation that takes a
* {@code String} value and is itself annotated with {@link RestMethod @RestMethod}.
* <p>
* Method parameters can be used to replace parts of the URL by annotating them with {@link Path}.
* Replacement sections are denoted by an identifier surrounded by curly braces (e.g., "{foo}").
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified {@link Converter}.
* </ul>
* <p>
* A parameter may represent the body of the request by using the {@link Body} annotation.
* The object will be converted to request representation by a call to
* {@link Converter#toBody(Object) toBody} on the supplied {@link Converter} for this instance. The
* body can also be a {@link TypedOutput} where it will be used directly.
* <p>
* Alternative request body formats are supported by method annotations and corresponding parameter
* annotations:
* <ul>
* <li>{@link FormEncoded @FormEncoded} - Form-encoded data with pairs specified by the
* {@link Pair @Pair} parameter annotation.
* <li>{@link Multipart @Multipart} - RFC 2387-compliant multi-part data with parts specified by
* the {@link Part @Part} parameter annotation.
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("/category/{cat}") // Asynchronous execution.
* void categoryList(@Path("cat") String a, @Query("page") int b, Callback<List<Item>> cb);
* @POST("/category/{cat}") // Synchronous execution.
* List<Item> categoryList(@Path("cat") String a, @Query("page") int b);
* }
* </pre>
* <p>
* Calling {@link #create(Class)} with {@code MyApi.class} will validate and create a new
* implementation of the API.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
// Log output is emitted in chunks of at most this many characters (see the
// substring loops in logAndReplaceRequest / logAndReplaceResponse).
private static final int LOG_CHUNK_SIZE = 4000;
// Thread names applied to the HTTP executor thread while a request is in
// flight (prefix + relative URL) and once it has completed (idle).
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
// Collaborators are supplied once by the Builder and never change. Only the
// debug flag is mutable; it is volatile so setDebug is visible to the threads
// that execute requests.
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
/** Instantiated only through {@link Builder}, which validates and defaults the arguments. */
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
Profiler profiler, Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.requestHeaders = requestHeaders;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/** Create an implementation of the API defined by the specified {@code service} interface. */
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
// The returned proxy routes every interface call through RestHandler.invoke.
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
/** {@link InvocationHandler} backing the proxies returned by {@link #create(Class)}. */
private class RestHandler implements InvocationHandler {
// Lazily-parsed metadata per interface method; access is guarded by
// synchronizing on the map itself.
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
// Synchronous methods execute on the caller's thread and return the result directly.
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
// Asynchronous methods take the Callback as their last argument.
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
// Give the profiler, if configured, a chance to capture per-call state and timing.
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
// Non-2XX status: buffer the body and surface the response as an HTTP error.
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
// Restore the idle name so thread dumps don't show a stale request URL.
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (Header header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
// Buffer the body into a byte[] so it can be both logged and re-sent.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (Header header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body so we can log it and replace the original response.
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
/** Builds the profiler metadata describing an outgoing request. */
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
/** API server base URL as a string; convenience overload of {@link #setServer(Server)}. */
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
/** Wraps a fixed {@link Client} in a provider; convenience overload of {@link #setClient(Client.Provider)}. */
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
// Fill in any collaborator the caller did not set with the platform default.
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
}
}
}
}
MergeMethods
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
<<<<<<< MINE
private final HeaderPairs headers;
=======
>>>>>>> YOURS
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
<<<<<<< MINE
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, HeaderPairs headers, Converter converter, Profiler profiler,
Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
=======
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
Profiler profiler, Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.requestHeaders = requestHeaders;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
>>>>>>> YOURS
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
/**
 * {@link InvocationHandler} backing the dynamic proxies returned by create(Class).
 * Routes each interface method call to an HTTP request, either synchronously on the
 * calling thread or asynchronously on httpExecutor with the callback marshaled to
 * callbackExecutor.
 */
private class RestHandler implements InvocationHandler {
// Lazily parsed per-method metadata, cached for the lifetime of the proxy.
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
// Guard the cache: a single proxy may be invoked concurrently from many threads.
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
// Asynchronous path: by convention the last argument is the Callback.
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
 * Execute an HTTP request.
 *
 * @return HTTP response object of specified {@code type}.
 * @throws RetrofitError Thrown if any error occurs during the HTTP request.
 */
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
// Logging consumes the request body; a replacement request is returned.
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
// Opaque token from beforeCall(), handed back to afterCall() below.
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
// Logging consumes the response body; a replacement response is returned.
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
// Non-2XX status: buffer the body and surface it as an HTTP error.
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
// Restore the idle name so thread dumps don't show a stale request URL.
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
// NOTE(review): this iterates HeaderPair, but the file's import block declares
// retrofit.http.client.Header — confirm the element type of Request.getHeaders().
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
// Blank line between headers and body, mirroring the HTTP wire format.
log.log("");
}
// Drain the one-shot body into memory so it can be both logged and re-sent.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
// Emit the body in LOG_CHUNK_SIZE pieces — presumably to stay under per-line logger limits.
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
// NOTE(review): iterates HeaderPair while the import block declares
// retrofit.http.client.Header — confirm the element type of Response.getHeaders().
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
// Blank line between headers and body, mirroring the HTTP wire format.
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
// Safe cast: the branch above guarantees body is a TypedByteArray here.
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
// Emit the body in LOG_CHUNK_SIZE pieces — presumably to stay under per-line logger limits.
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
// The body (if any) is now backed by a byte[], so the caller can still read it.
return response;
}
/**
 * Assembles the immutable request metadata handed to the {@link Profiler} after a call:
 * HTTP method, server base URL, relative URL, and (when a body is present) its length
 * and MIME type.
 */
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
    RestMethodInfo methodDetails, Request request) {
  TypedOutput requestBody = request.getBody();
  boolean hasBody = requestBody != null;
  long contentLength = hasBody ? requestBody.length() : 0;
  String contentType = hasBody ? requestBody.mimeType() : null;
  return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
      methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
>>>>>>> YOURS
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
<<<<<<< MINE
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
=======
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
>>>>>>> YOURS
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
// Merge conflict resolved in favor of the RequestHeaders variant: the surrounding class
// (Builder.build() passes requestHeaders; invokeRequest() calls requestHeaders.get())
// only compiles against this side. The competing HeaderPairs field/constructor dropped.
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
// Volatile so setDebug(boolean) toggles take effect for in-flight requests.
private volatile boolean debug;

/** Instances are created via {@link Builder}, which supplies sane defaults. */
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
    Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
    Profiler profiler, Log log, boolean debug) {
  this.server = server;
  this.clientProvider = clientProvider;
  this.httpExecutor = httpExecutor;
  this.callbackExecutor = callbackExecutor;
  this.requestHeaders = requestHeaders;
  this.converter = converter;
  this.profiler = profiler;
  this.log = log;
  this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
>>>>>>> YOURS
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
<<<<<<< MINE
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
=======
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
>>>>>>> YOURS
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
KeepBothMethods
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
<<<<<<< MINE
private final HeaderPairs headers;
=======
>>>>>>> YOURS
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, HeaderPairs headers, Converter converter, Profiler profiler,
Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
Profiler profiler, Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.requestHeaders = requestHeaders;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
/**
 * {@link InvocationHandler} behind every proxy returned by {@code create}. Each interface
 * method call is translated into an HTTP request via {@link RestMethodInfo}; execution is
 * synchronous when the method declares a return type, or dispatched to {@code httpExecutor}
 * when the last argument is a {@link Callback}.
 * NOTE(review): deliberately a non-static inner class — it reads the enclosing
 * RestAdapter's configuration (server, converter, executors, profiler, log).
 */
private class RestHandler implements InvocationHandler {
  /** Per-method metadata cache; all access synchronizes on the map itself. */
  private final Map<Method, RestMethodInfo> methodDetailsCache =
      new LinkedHashMap<Method, RestMethodInfo>();

  @SuppressWarnings("unchecked") //
  @Override public Object invoke(Object proxy, Method method, final Object[] args)
      throws InvocationTargetException, IllegalAccessException {
    // If the method is a method from Object then defer to normal invocation.
    if (method.getDeclaringClass() == Object.class) {
      return method.invoke(this, args);
    }

    // Load or create the details cache for the current method.
    final RestMethodInfo methodDetails;
    synchronized (methodDetailsCache) {
      RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
      if (tempMethodDetails == null) {
        tempMethodDetails = new RestMethodInfo(method);
        methodDetailsCache.put(method, tempMethodDetails);
      }
      methodDetails = tempMethodDetails;
    }

    if (methodDetails.isSynchronous) {
      return invokeRequest(methodDetails, args);
    }

    if (httpExecutor == null || callbackExecutor == null) {
      throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
    }
    // Asynchronous path: the trailing argument is the caller's Callback.
    Callback<?> callback = (Callback<?>) args[args.length - 1];
    httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
      @Override public ResponseWrapper obtainResponse() {
        return (ResponseWrapper) invokeRequest(methodDetails, args);
      }
    });
    return null; // Asynchronous methods should have return type of void.
  }

  /**
   * Execute an HTTP request.
   *
   * @return HTTP response object of specified {@code type} when synchronous, otherwise a
   *     {@link ResponseWrapper} pairing the raw response with the converted body.
   * @throws RetrofitError Thrown if any error occurs during the HTTP request.
   */
  private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
    methodDetails.init(); // Ensure all relevant method information has been loaded.

    String serverUrl = server.getUrl();
    String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
    try {
      Request request = new RequestBuilder(converter) //
          .apiUrl(serverUrl) //
          .args(args) //
          .headers(requestHeaders.get()) //
          .methodInfo(methodDetails) //
          .build();
      url = request.getUrl();

      if (!methodDetails.isSynchronous) {
        // If we are executing asynchronously then update the current thread with a useful name.
        Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
      }

      if (debug) {
        request = logAndReplaceRequest(request);
      }

      Object profilerObject = null;
      if (profiler != null) {
        profilerObject = profiler.beforeCall();
      }

      // Time the full round trip so the profiler can report elapsed milliseconds.
      long start = System.nanoTime();
      Response response = clientProvider.get().execute(request);
      long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);

      int statusCode = response.getStatus();
      if (profiler != null) {
        RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
        profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
      }

      if (debug) {
        response = logAndReplaceResponse(url, response, elapsedTime);
      }

      Type type = methodDetails.responseObjectType;
      if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
        // Caller requested the raw Response object directly.
        if (type.equals(Response.class)) {
          // Read the entire stream and replace with one backed by a byte[]
          response = Utils.readBodyToBytesIfNecessary(response);

          if (methodDetails.isSynchronous) {
            return response;
          }
          return new ResponseWrapper(response, response);
        }

        TypedInput body = response.getBody();
        if (body == null) {
          return new ResponseWrapper(response, null);
        }
        try {
          Object convert = converter.fromBody(body, type);
          if (methodDetails.isSynchronous) {
            return convert;
          }
          return new ResponseWrapper(response, convert);
        } catch (ConversionException e) {
          // The response body was partially read by the converter. Replace it with null.
          response = Utils.replaceResponseBody(response, null);
          throw RetrofitError.conversionError(url, response, converter, type, e);
        }
      }
      // Non-2XX: buffer the body so callers can still inspect it from the error object.
      response = Utils.readBodyToBytesIfNecessary(response);
      throw RetrofitError.httpError(url, response, converter, type);
    } catch (RetrofitError e) {
      throw e; // Pass through our own errors.
    } catch (IOException e) {
      throw RetrofitError.networkError(url, e);
    } catch (Throwable t) {
      throw RetrofitError.unexpectedError(url, t);
    } finally {
      if (!methodDetails.isSynchronous) {
        // Restore the idle name so thread dumps don't show a stale request URL.
        Thread.currentThread().setName(IDLE_THREAD_NAME);
      }
    }
  }
}
/** Logs request line, headers and chunked body; returns a byte-backed clone since the body stream is consumed. */
private Request logAndReplaceRequest(Request request) throws IOException {
  log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
  for (HeaderPair header : request.getHeaders()) {
    log.log(header.getName() + ": " + header.getValue());
  }

  TypedOutput body = request.getBody();
  int bodySize = 0;
  if (body != null) {
    if (!request.getHeaders().isEmpty()) {
      log.log("");
    }
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    body.writeTo(sink);
    byte[] bytes = sink.toByteArray();
    bodySize = bytes.length;
    String mime = body.mimeType();
    String text = new String(bytes, Utils.parseCharset(mime));
    int offset = 0;
    while (offset < text.length()) {
      int end = Math.min(text.length(), offset + LOG_CHUNK_SIZE);
      log.log(text.substring(offset, end));
      offset += LOG_CHUNK_SIZE;
    }
    body = new TypedByteArray(mime, bytes);
  }
  log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
  // The original body was consumed above; hand back an identical request built on the bytes.
  return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Logs status line, headers and chunked body; returns a response whose body is byte-backed. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
    throws IOException {
  log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
  for (HeaderPair header : response.getHeaders()) {
    log.log(header.getName() + ": " + header.getValue());
  }

  TypedInput body = response.getBody();
  int bodySize = 0;
  if (body != null) {
    if (!response.getHeaders().isEmpty()) {
      log.log("");
    }
    if (!(body instanceof TypedByteArray)) {
      // Buffer the body into a byte[] so it can be both logged here and read again by the caller.
      response = Utils.readBodyToBytesIfNecessary(response);
      body = response.getBody();
    }
    byte[] bytes = ((TypedByteArray) body).getBytes();
    bodySize = bytes.length;
    String text = new String(bytes, Utils.parseCharset(body.mimeType()));
    int offset = 0;
    while (offset < text.length()) {
      int end = Math.min(text.length(), offset + LOG_CHUNK_SIZE);
      log.log(text.substring(offset, end));
      offset += LOG_CHUNK_SIZE;
    }
  }
  log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
  return response;
}
/** Builds the immutable profiler description of an outgoing request (length/type are 0/null for bodiless requests). */
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
    RestMethodInfo methodDetails, Request request) {
  TypedOutput body = request.getBody();
  long contentLength = (body == null) ? 0 : body.length();
  String contentType = (body == null) ? null : body.mimeType();
  return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
      methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
>>>>>>> YOURS
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
// Merge conflict resolved: the HeaderPairs field and its duplicate constructor from the
// abandoned branch were superseded by the RequestHeaders rename; only that variant is kept.
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug; // volatile: mutated at runtime through setDebug

/** Sole constructor; instances are produced by {@code Builder.build()}. */
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
    Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
    Profiler profiler, Log log, boolean debug) {
  this.server = server;
  this.clientProvider = clientProvider;
  this.httpExecutor = httpExecutor;
  this.callbackExecutor = callbackExecutor;
  this.requestHeaders = requestHeaders;
  this.converter = converter;
  this.profiler = profiler;
  this.log = log;
  this.debug = debug;
}
/** Toggle debug request/response logging on and off; safe to flip at runtime ({@code debug} is volatile). */
public void setDebug(boolean debug) {
  this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
  if (!service.isInterface()) {
    throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
  }
  // Single-interface proxy; all invocations funnel into a fresh RestHandler.
  InvocationHandler handler = new RestHandler();
  return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service }, handler);
}
/**
 * {@link InvocationHandler} behind every proxy returned by {@code create}. Each interface
 * method call is translated into an HTTP request via {@link RestMethodInfo}; execution is
 * synchronous when the method declares a return type, or dispatched to {@code httpExecutor}
 * when the last argument is a {@link Callback}.
 * NOTE(review): deliberately a non-static inner class — it reads the enclosing
 * RestAdapter's configuration (server, converter, executors, profiler, log).
 */
private class RestHandler implements InvocationHandler {
  /** Per-method metadata cache; all access synchronizes on the map itself. */
  private final Map<Method, RestMethodInfo> methodDetailsCache =
      new LinkedHashMap<Method, RestMethodInfo>();

  @SuppressWarnings("unchecked") //
  @Override public Object invoke(Object proxy, Method method, final Object[] args)
      throws InvocationTargetException, IllegalAccessException {
    // If the method is a method from Object then defer to normal invocation.
    if (method.getDeclaringClass() == Object.class) {
      return method.invoke(this, args);
    }

    // Load or create the details cache for the current method.
    final RestMethodInfo methodDetails;
    synchronized (methodDetailsCache) {
      RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
      if (tempMethodDetails == null) {
        tempMethodDetails = new RestMethodInfo(method);
        methodDetailsCache.put(method, tempMethodDetails);
      }
      methodDetails = tempMethodDetails;
    }

    if (methodDetails.isSynchronous) {
      return invokeRequest(methodDetails, args);
    }

    if (httpExecutor == null || callbackExecutor == null) {
      throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
    }
    // Asynchronous path: the trailing argument is the caller's Callback.
    Callback<?> callback = (Callback<?>) args[args.length - 1];
    httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
      @Override public ResponseWrapper obtainResponse() {
        return (ResponseWrapper) invokeRequest(methodDetails, args);
      }
    });
    return null; // Asynchronous methods should have return type of void.
  }

  /**
   * Execute an HTTP request.
   *
   * @return HTTP response object of specified {@code type} when synchronous, otherwise a
   *     {@link ResponseWrapper} pairing the raw response with the converted body.
   * @throws RetrofitError Thrown if any error occurs during the HTTP request.
   */
  private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
    methodDetails.init(); // Ensure all relevant method information has been loaded.

    String serverUrl = server.getUrl();
    String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
    try {
      Request request = new RequestBuilder(converter) //
          .apiUrl(serverUrl) //
          .args(args) //
          .headers(requestHeaders.get()) //
          .methodInfo(methodDetails) //
          .build();
      url = request.getUrl();

      if (!methodDetails.isSynchronous) {
        // If we are executing asynchronously then update the current thread with a useful name.
        Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
      }

      if (debug) {
        request = logAndReplaceRequest(request);
      }

      Object profilerObject = null;
      if (profiler != null) {
        profilerObject = profiler.beforeCall();
      }

      // Time the full round trip so the profiler can report elapsed milliseconds.
      long start = System.nanoTime();
      Response response = clientProvider.get().execute(request);
      long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);

      int statusCode = response.getStatus();
      if (profiler != null) {
        RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
        profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
      }

      if (debug) {
        response = logAndReplaceResponse(url, response, elapsedTime);
      }

      Type type = methodDetails.responseObjectType;
      if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
        // Caller requested the raw Response object directly.
        if (type.equals(Response.class)) {
          // Read the entire stream and replace with one backed by a byte[]
          response = Utils.readBodyToBytesIfNecessary(response);

          if (methodDetails.isSynchronous) {
            return response;
          }
          return new ResponseWrapper(response, response);
        }

        TypedInput body = response.getBody();
        if (body == null) {
          return new ResponseWrapper(response, null);
        }
        try {
          Object convert = converter.fromBody(body, type);
          if (methodDetails.isSynchronous) {
            return convert;
          }
          return new ResponseWrapper(response, convert);
        } catch (ConversionException e) {
          // The response body was partially read by the converter. Replace it with null.
          response = Utils.replaceResponseBody(response, null);
          throw RetrofitError.conversionError(url, response, converter, type, e);
        }
      }
      // Non-2XX: buffer the body so callers can still inspect it from the error object.
      response = Utils.readBodyToBytesIfNecessary(response);
      throw RetrofitError.httpError(url, response, converter, type);
    } catch (RetrofitError e) {
      throw e; // Pass through our own errors.
    } catch (IOException e) {
      throw RetrofitError.networkError(url, e);
    } catch (Throwable t) {
      throw RetrofitError.unexpectedError(url, t);
    } finally {
      if (!methodDetails.isSynchronous) {
        // Restore the idle name so thread dumps don't show a stale request URL.
        Thread.currentThread().setName(IDLE_THREAD_NAME);
      }
    }
  }
}
/** Logs request line, headers and chunked body; returns a byte-backed clone since the body stream is consumed. */
private Request logAndReplaceRequest(Request request) throws IOException {
  log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
  for (HeaderPair header : request.getHeaders()) {
    log.log(header.getName() + ": " + header.getValue());
  }

  TypedOutput body = request.getBody();
  int bodySize = 0;
  if (body != null) {
    if (!request.getHeaders().isEmpty()) {
      log.log("");
    }
    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    body.writeTo(sink);
    byte[] bytes = sink.toByteArray();
    bodySize = bytes.length;
    String mime = body.mimeType();
    String text = new String(bytes, Utils.parseCharset(mime));
    int offset = 0;
    while (offset < text.length()) {
      int end = Math.min(text.length(), offset + LOG_CHUNK_SIZE);
      log.log(text.substring(offset, end));
      offset += LOG_CHUNK_SIZE;
    }
    body = new TypedByteArray(mime, bytes);
  }
  log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
  // The original body was consumed above; hand back an identical request built on the bytes.
  return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Logs status line, headers and chunked body; returns a response whose body is byte-backed. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
    throws IOException {
  log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
  for (HeaderPair header : response.getHeaders()) {
    log.log(header.getName() + ": " + header.getValue());
  }

  TypedInput body = response.getBody();
  int bodySize = 0;
  if (body != null) {
    if (!response.getHeaders().isEmpty()) {
      log.log("");
    }
    if (!(body instanceof TypedByteArray)) {
      // Buffer the body into a byte[] so it can be both logged here and read again by the caller.
      response = Utils.readBodyToBytesIfNecessary(response);
      body = response.getBody();
    }
    byte[] bytes = ((TypedByteArray) body).getBytes();
    bodySize = bytes.length;
    String text = new String(bytes, Utils.parseCharset(body.mimeType()));
    int offset = 0;
    while (offset < text.length()) {
      int end = Math.min(text.length(), offset + LOG_CHUNK_SIZE);
      log.log(text.substring(offset, end));
      offset += LOG_CHUNK_SIZE;
    }
  }
  log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
  return response;
}
/** Builds the immutable profiler description of an outgoing request (length/type are 0/null for bodiless requests). */
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
    RestMethodInfo methodDetails, Request request) {
  TypedOutput body = request.getBody();
  long contentLength = (body == null) ? 0 : body.length();
  String contentType = (body == null) ? null : body.mimeType();
  return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
      methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
>>>>>>> YOURS
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
Safe
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
// Merge conflict resolved: the HeaderPairs field and its duplicate constructor from the
// abandoned branch were superseded by the RequestHeaders rename; only that variant is kept.
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug; // volatile: mutated at runtime through setDebug

/** Sole constructor; instances are produced by {@code Builder.build()}. */
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
    Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
    Profiler profiler, Log log, boolean debug) {
  this.server = server;
  this.clientProvider = clientProvider;
  this.httpExecutor = httpExecutor;
  this.callbackExecutor = callbackExecutor;
  this.requestHeaders = requestHeaders;
  this.converter = converter;
  this.profiler = profiler;
  this.log = log;
  this.debug = debug;
}
/** Toggle debug request/response logging on and off; safe to flip at runtime ({@code debug} is volatile). */
public void setDebug(boolean debug) {
  this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
  if (!service.isInterface()) {
    throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
  }
  // Single-interface proxy; all invocations funnel into a fresh RestHandler.
  InvocationHandler handler = new RestHandler();
  return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service }, handler);
}
/**
 * {@link InvocationHandler} behind every proxy returned by {@code create}. Each interface
 * method call is translated into an HTTP request via {@link RestMethodInfo}; execution is
 * synchronous when the method declares a return type, or dispatched to {@code httpExecutor}
 * when the last argument is a {@link Callback}.
 * NOTE(review): deliberately a non-static inner class — it reads the enclosing
 * RestAdapter's configuration (server, converter, executors, profiler, log).
 */
private class RestHandler implements InvocationHandler {
  /** Per-method metadata cache; all access synchronizes on the map itself. */
  private final Map<Method, RestMethodInfo> methodDetailsCache =
      new LinkedHashMap<Method, RestMethodInfo>();

  @SuppressWarnings("unchecked") //
  @Override public Object invoke(Object proxy, Method method, final Object[] args)
      throws InvocationTargetException, IllegalAccessException {
    // If the method is a method from Object then defer to normal invocation.
    if (method.getDeclaringClass() == Object.class) {
      return method.invoke(this, args);
    }

    // Load or create the details cache for the current method.
    final RestMethodInfo methodDetails;
    synchronized (methodDetailsCache) {
      RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
      if (tempMethodDetails == null) {
        tempMethodDetails = new RestMethodInfo(method);
        methodDetailsCache.put(method, tempMethodDetails);
      }
      methodDetails = tempMethodDetails;
    }

    if (methodDetails.isSynchronous) {
      return invokeRequest(methodDetails, args);
    }

    if (httpExecutor == null || callbackExecutor == null) {
      throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
    }
    // Asynchronous path: the trailing argument is the caller's Callback.
    Callback<?> callback = (Callback<?>) args[args.length - 1];
    httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
      @Override public ResponseWrapper obtainResponse() {
        return (ResponseWrapper) invokeRequest(methodDetails, args);
      }
    });
    return null; // Asynchronous methods should have return type of void.
  }

  /**
   * Execute an HTTP request.
   *
   * @return HTTP response object of specified {@code type} when synchronous, otherwise a
   *     {@link ResponseWrapper} pairing the raw response with the converted body.
   * @throws RetrofitError Thrown if any error occurs during the HTTP request.
   */
  private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
    methodDetails.init(); // Ensure all relevant method information has been loaded.

    String serverUrl = server.getUrl();
    String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
    try {
      Request request = new RequestBuilder(converter) //
          .apiUrl(serverUrl) //
          .args(args) //
          .headers(requestHeaders.get()) //
          .methodInfo(methodDetails) //
          .build();
      url = request.getUrl();

      if (!methodDetails.isSynchronous) {
        // If we are executing asynchronously then update the current thread with a useful name.
        Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
      }

      if (debug) {
        request = logAndReplaceRequest(request);
      }

      Object profilerObject = null;
      if (profiler != null) {
        profilerObject = profiler.beforeCall();
      }

      // Time the full round trip so the profiler can report elapsed milliseconds.
      long start = System.nanoTime();
      Response response = clientProvider.get().execute(request);
      long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);

      int statusCode = response.getStatus();
      if (profiler != null) {
        RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
        profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
      }

      if (debug) {
        response = logAndReplaceResponse(url, response, elapsedTime);
      }

      Type type = methodDetails.responseObjectType;
      if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
        // Caller requested the raw Response object directly.
        if (type.equals(Response.class)) {
          // Read the entire stream and replace with one backed by a byte[]
          response = Utils.readBodyToBytesIfNecessary(response);

          if (methodDetails.isSynchronous) {
            return response;
          }
          return new ResponseWrapper(response, response);
        }

        TypedInput body = response.getBody();
        if (body == null) {
          return new ResponseWrapper(response, null);
        }
        try {
          Object convert = converter.fromBody(body, type);
          if (methodDetails.isSynchronous) {
            return convert;
          }
          return new ResponseWrapper(response, convert);
        } catch (ConversionException e) {
          // The response body was partially read by the converter. Replace it with null.
          response = Utils.replaceResponseBody(response, null);
          throw RetrofitError.conversionError(url, response, converter, type, e);
        }
      }
      // Non-2XX: buffer the body so callers can still inspect it from the error object.
      response = Utils.readBodyToBytesIfNecessary(response);
      throw RetrofitError.httpError(url, response, converter, type);
    } catch (RetrofitError e) {
      throw e; // Pass through our own errors.
    } catch (IOException e) {
      throw RetrofitError.networkError(url, e);
    } catch (Throwable t) {
      throw RetrofitError.unexpectedError(url, t);
    } finally {
      if (!methodDetails.isSynchronous) {
        // Restore the idle name so thread dumps don't show a stale request URL.
        Thread.currentThread().setName(IDLE_THREAD_NAME);
      }
    }
  }
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
  log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
  for (HeaderPair header : request.getHeaders()) {
    log.log(header.getName() + ": " + header.getValue());
  }

  TypedOutput requestBody = request.getBody();
  int byteCount = 0;
  if (requestBody != null) {
    if (!request.getHeaders().isEmpty()) {
      log.log(""); // Blank line separating the headers from the body.
    }

    // Capture the body bytes so they can be both logged and replayed.
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    requestBody.writeTo(buffer);
    byte[] bytes = buffer.toByteArray();
    byteCount = bytes.length;

    String mimeType = requestBody.mimeType();
    String text = new String(bytes, Utils.parseCharset(mimeType));
    int length = text.length();
    // Emit in fixed-size chunks; some log sinks truncate very long lines.
    for (int offset = 0; offset < length; offset += LOG_CHUNK_SIZE) {
      log.log(text.substring(offset, Math.min(length, offset + LOG_CHUNK_SIZE)));
    }

    requestBody = new TypedByteArray(mimeType, bytes);
  }
  log.log(String.format("---> END HTTP (%s-byte body)", byteCount));

  // The original body stream was consumed while logging; hand back an
  // equivalent request whose body is backed by the captured bytes.
  return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), requestBody);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
    throws IOException {
  log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
  for (HeaderPair header : response.getHeaders()) {
    log.log(header.getName() + ": " + header.getValue());
  }

  TypedInput responseBody = response.getBody();
  int byteCount = 0;
  if (responseBody != null) {
    if (!response.getHeaders().isEmpty()) {
      log.log(""); // Blank line separating the headers from the body.
    }
    if (!(responseBody instanceof TypedByteArray)) {
      // Buffer the whole body so it can be logged here and still read by the caller.
      response = Utils.readBodyToBytesIfNecessary(response);
      responseBody = response.getBody();
    }

    byte[] bytes = ((TypedByteArray) responseBody).getBytes();
    byteCount = bytes.length;
    String text = new String(bytes, Utils.parseCharset(responseBody.mimeType()));
    int length = text.length();
    // Emit in fixed-size chunks; some log sinks truncate very long lines.
    for (int offset = 0; offset < length; offset += LOG_CHUNK_SIZE) {
      log.log(text.substring(offset, Math.min(length, offset + LOG_CHUNK_SIZE)));
    }
  }
  log.log(String.format("<--- END HTTP (%s-byte body)", byteCount));
  return response;
}
/** Describe a request to the profiler; body metrics default to 0/null when absent. */
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
    RestMethodInfo methodDetails, Request request) {
  long length = 0;
  String mime = null;
  TypedOutput output = request.getBody();
  if (output != null) {
    length = output.length();
    mime = output.mimeType();
  }
  return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
      methodDetails.requestUrl, length, mime);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
>>>>>>> YOURS
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Converts Java method calls to Rest calls.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
<<<<<<< MINE
private final HeaderPairs headers;
=======
>>>>>>> YOURS
private final RequestHeaders requestHeaders;
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, HeaderPairs headers, Converter converter, Profiler profiler,
Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.headers = headers;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
Profiler profiler, Log log, boolean debug) {
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.requestHeaders = requestHeaders;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/**
* Adapts a Java interface to a REST API.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The names of URL parameters are retrieved from {@link Name}
* annotations on the method parameters.
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.</li>
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified
* {@link Converter}.</li>
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("go") // Asynchronous execution.
* void go(@Name("a") String a, @Name("b") int b, Callback<? super MyResult> callback);
* @POST("go") // Synchronous execution.
* MyResult go(@Name("a") String a, @Name("b") int b);
* }
* </pre>
*
* @param type to implement
*/
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
long contentLength = 0;
String contentType = null;
TypedOutput body = request.getBody();
if (body != null) {
contentLength = body.length();
contentType = body.mimeType();
}
return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
methodDetails.requestUrl, contentLength, contentType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
>>>>>>> YOURS
private RequestHeaders requestHeaders;
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
return this;
}
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
Unstructured
// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Adapts a Java interface to a REST API.
* <p>
* API endpoints are defined as methods on an interface with annotation providing metadata about
* the form in which the HTTP call should be made.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The built-in methods are {@link GET}, {@link PUT}, {@link POST}, {@link HEAD},
* and {@link DELETE}. You can define your own HTTP method by creating an annotation that takes a
* {code String} value and itself is annotated with {@link RestMethod @RestMethod}.
* <p>
* Method parameters can be used to replace parts of the URL by annotating them with {@link Path}.
* Replacement sections are denoted by an identifier surrounded by curly braces (e.g., "{foo}").
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified {@link Converter}.
* </ul>
* <p>
* An parameter which will represents the body of the request using the {@link Body} annotation.
* The object will be converted to request representation by a call to
* {@link Converter#toBody(Object) toBody} on the supplied {@link Converter} for this instance. The
* body can also be a {@link TypedOutput} where it will be used directly.
* <p>
* Alternative request body formats are supported by method annotations and corresponding parameter
* annotations:
* <ul>
* <li>{@link FormEncoded @FormEncoded} - Form-encoded data with pairs specified by the
* {@link Pair @Pair} parameter annotation.
* <li>{@link Multipart @Multipart} - RFC 2387-compliant multi-part data with parts specified by
* the {@link Part @Part} parameter annotation.
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("/category/{cat}") // Asynchronous execution.
* void categoryList(@Path("cat") String a, @Query("page") int b, Callback<List<Item>> cb);
* @POST("/category/{cat}") // Synchronous execution.
* List<Item> categoryList(@Path("cat") String a, @Query("page") int b);
* }
* </pre>
* <p>
* Calling {@link #create(Class)} with {@code MyApi.class} will validate and create a new
* implementation of the API.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
<<<<<<< MINE
private final HeaderPairs headers;
=======
private final RequestHeaders requestHeaders;
>>>>>>> YOURS
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
// Merge conflict resolved on the signature: the body assigns
// {@code this.requestHeaders}, so the RequestHeaders variant (YOURS) is the
// only one consistent with the constructor body and with Builder.build().
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
    Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
    Profiler profiler, Log log, boolean debug) {
  this.server = server;
  this.clientProvider = clientProvider;
  this.httpExecutor = httpExecutor;
  this.callbackExecutor = callbackExecutor;
  this.requestHeaders = requestHeaders;
  this.converter = converter;
  this.profiler = profiler;
  this.log = log;
  this.debug = debug;
}
/**
* Toggle debug logging on and off.
*
* @param debug {@code true} to log requests and responses via the configured Log.
*/
public void setDebug(boolean debug) {
this.debug = debug;
}
/** Create an implementation of the API defined by the specified {@code service} interface. */
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
  // Only interfaces can be backed by a dynamic proxy.
  if (!service.isInterface()) {
    throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
  }
  ClassLoader loader = service.getClassLoader();
  Class<?>[] interfaces = { service };
  return (T) Proxy.newProxyInstance(loader, interfaces, new RestHandler());
}
// Dynamic-proxy handler that turns interface method calls into HTTP requests.
private class RestHandler implements InvocationHandler {
// Reflection metadata is expensive to compute; cache it per interface method.
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
// Synchronous methods execute on the calling thread and return the value directly.
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
// Asynchronous methods take the Callback as their last argument; the request
// runs on httpExecutor and the callback is marshaled onto callbackExecutor.
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* Builds the request from the cached method metadata, optionally logs and
* profiles it, executes it on the configured client, and converts the body.
*
* @param methodDetails cached reflection metadata for the invoked method.
* @param args the proxy invocation arguments.
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
// Logging consumes the body, so the request is replaced with an equivalent copy.
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
// Logging consumes the body, so the response is replaced with an equivalent copy.
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
// Non-2XX: buffer the body so callers can still inspect it from the error.
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
// Restore the idle thread name set above for asynchronous executions.
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/**
* Log request headers and body. Consumes the request body and returns an identical replacement.
*
* @param request the outgoing request to log.
* @return a new request equal to the original, with its body rebuilt from the consumed bytes.
* @throws IOException if writing the body to the in-memory buffer fails.
*/
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
// NOTE(review): iterates HeaderPair, but this file's imports bring in client.Header —
// TODO confirm which header type survived the merge.
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
// Blank line separates the header block from the body in the log output.
log.log("");
}
// Writing the body consumes it, so capture the bytes both to log and to rebuild the request.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
// Emit the body in LOG_CHUNK_SIZE-character chunks (presumably to respect per-message
// log sink limits — confirm against the configured Log implementation).
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/**
* Log response headers and body. Consumes the response body and returns an identical replacement.
*
* @param url the request URL, echoed in the log line.
* @param response the response to log.
* @param elapsedTime request round-trip time in milliseconds, echoed in the log line.
* @return the original response, or a copy backed by a byte[] if the body had to be buffered.
* @throws IOException if buffering the response body fails.
*/
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
// Blank line separates the header block from the body in the log output.
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body so we can log it, and replace the original response
// with one whose body can be read again by the caller.
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
// Emit the body in LOG_CHUNK_SIZE-character chunks, mirroring the request logging.
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
/**
* Snapshot the request's method, URL and body metadata for the profiler.
* A missing body is reported as zero length with a null content type.
*/
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
TypedOutput output = request.getBody();
long length = (output != null) ? output.length() : 0;
String mimeType = (output != null) ? output.mimeType() : null;
return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
methodDetails.requestUrl, length, mimeType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
private RequestHeaders requestHeaders;
>>>>>>> YOURS
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
<<<<<<< MINE
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
=======
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
>>>>>>> YOURS
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}// Copyright 2012 Square, Inc.
package retrofit.http;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import retrofit.http.Profiler.RequestInformation;
import retrofit.http.client.Client;
import retrofit.http.client.Header;
import retrofit.http.client.Request;
import retrofit.http.client.Response;
import retrofit.http.mime.TypedByteArray;
import retrofit.http.mime.TypedInput;
import retrofit.http.mime.TypedOutput;
import static retrofit.http.Utils.SynchronousExecutor;
/**
* Adapts a Java interface to a REST API.
* <p>
* API endpoints are defined as methods on an interface with annotation providing metadata about
* the form in which the HTTP call should be made.
* <p>
* The relative path for a given method is obtained from an annotation on the method describing
* the request type. The built-in methods are {@link GET}, {@link PUT}, {@link POST}, {@link HEAD},
* and {@link DELETE}. You can define your own HTTP method by creating an annotation that takes a
* {@code String} value and itself is annotated with {@link RestMethod @RestMethod}.
* <p>
* Method parameters can be used to replace parts of the URL by annotating them with {@link Path}.
* Replacement sections are denoted by an identifier surrounded by curly braces (e.g., "{foo}").
* <p>
* HTTP requests happen in one of two ways:
* <ul>
* <li>On the provided HTTP {@link Executor} with callbacks marshaled to the callback
* {@link Executor}. The last method parameter should be of type {@link Callback}. The HTTP
* response will be converted to the callback's parameter type using the specified
* {@link Converter}. If the callback parameter type uses a wildcard, the lower bound will be
* used as the conversion type.
* <li>On the current thread returning the response or throwing a {@link RetrofitError}. The HTTP
* response will be converted to the method's return type using the specified {@link Converter}.
* </ul>
* <p>
* A parameter which represents the body of the request using the {@link Body} annotation.
* The object will be converted to request representation by a call to
* {@link Converter#toBody(Object) toBody} on the supplied {@link Converter} for this instance. The
* body can also be a {@link TypedOutput} where it will be used directly.
* <p>
* Alternative request body formats are supported by method annotations and corresponding parameter
* annotations:
* <ul>
* <li>{@link FormEncoded @FormEncoded} - Form-encoded data with pairs specified by the
* {@link Pair @Pair} parameter annotation.
* <li>{@link Multipart @Multipart} - RFC 2387-compliant multi-part data with parts specified by
* the {@link Part @Part} parameter annotation.
* </ul>
* <p>
* For example:
* <pre>
* public interface MyApi {
* @POST("/category/{cat}") // Asynchronous execution.
* void categoryList(@Path("cat") String a, @Query("page") int b, Callback<List<Item>> cb);
* @POST("/category/{cat}") // Synchronous execution.
* List<Item> categoryList(@Path("cat") String a, @Query("page") int b);
* }
* </pre>
* <p>
* Calling {@link #create(Class)} with {@code MyApi.class} will validate and create a new
* implementation of the API.
*
* @author Bob Lee (bob@squareup.com)
* @author Jake Wharton (jw@squareup.com)
*/
public class RestAdapter {
private static final int LOG_CHUNK_SIZE = 4000;
static final String THREAD_PREFIX = "Retrofit-";
static final String IDLE_THREAD_NAME = THREAD_PREFIX + "Idle";
/** Simple logging abstraction for debug messages. */
public interface Log {
/** Log a debug message to the appropriate console. */
void log(String message);
}
private final Server server;
private final Client.Provider clientProvider;
private final Executor httpExecutor;
private final Executor callbackExecutor;
<<<<<<< MINE
private final HeaderPairs headers;
=======
private final RequestHeaders requestHeaders;
>>>>>>> YOURS
private final Converter converter;
private final Profiler profiler;
private final Log log;
private volatile boolean debug;
private RestAdapter(Server server, Client.Provider clientProvider, Executor httpExecutor,
<<<<<<< MINE
Executor callbackExecutor, HeaderPairs headers, Converter converter, Profiler profiler,
Log log, boolean debug) {
=======
Executor callbackExecutor, RequestHeaders requestHeaders, Converter converter,
Profiler profiler, Log log, boolean debug) {
>>>>>>> YOURS
this.server = server;
this.clientProvider = clientProvider;
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
this.requestHeaders = requestHeaders;
this.converter = converter;
this.profiler = profiler;
this.log = log;
this.debug = debug;
}
/** Toggle debug logging on and off. */
public void setDebug(boolean debug) {
this.debug = debug;
}
/** Create an implementation of the API defined by the specified {@code service} interface. */
@SuppressWarnings("unchecked")
public <T> T create(Class<T> service) {
if (!service.isInterface()) {
throw new IllegalArgumentException("Only interface endpoint definitions are supported.");
}
return (T) Proxy.newProxyInstance(service.getClassLoader(), new Class<?>[] { service },
new RestHandler());
}
private class RestHandler implements InvocationHandler {
private final Map<Method, RestMethodInfo> methodDetailsCache =
new LinkedHashMap<Method, RestMethodInfo>();
@SuppressWarnings("unchecked") //
@Override public Object invoke(Object proxy, Method method, final Object[] args)
throws InvocationTargetException, IllegalAccessException {
// If the method is a method from Object then defer to normal invocation.
if (method.getDeclaringClass() == Object.class) {
return method.invoke(this, args);
}
// Load or create the details cache for the current method.
final RestMethodInfo methodDetails;
synchronized (methodDetailsCache) {
RestMethodInfo tempMethodDetails = methodDetailsCache.get(method);
if (tempMethodDetails == null) {
tempMethodDetails = new RestMethodInfo(method);
methodDetailsCache.put(method, tempMethodDetails);
}
methodDetails = tempMethodDetails;
}
if (methodDetails.isSynchronous) {
return invokeRequest(methodDetails, args);
}
if (httpExecutor == null || callbackExecutor == null) {
throw new IllegalStateException("Asynchronous invocation requires calling setExecutors.");
}
Callback<?> callback = (Callback<?>) args[args.length - 1];
httpExecutor.execute(new CallbackRunnable(callback, callbackExecutor) {
@Override public ResponseWrapper obtainResponse() {
return (ResponseWrapper) invokeRequest(methodDetails, args);
}
});
return null; // Asynchronous methods should have return type of void.
}
/**
* Execute an HTTP request.
*
* @return HTTP response object of specified {@code type}.
* @throws RetrofitError Thrown if any error occurs during the HTTP request.
*/
private Object invokeRequest(RestMethodInfo methodDetails, Object[] args) {
methodDetails.init(); // Ensure all relevant method information has been loaded.
String serverUrl = server.getUrl();
String url = serverUrl; // Keep some url in case RequestBuilder throws an exception.
try {
Request request = new RequestBuilder(converter) //
.apiUrl(serverUrl) //
.args(args) //
.headers(requestHeaders.get()) //
.methodInfo(methodDetails) //
.build();
url = request.getUrl();
if (!methodDetails.isSynchronous) {
// If we are executing asynchronously then update the current thread with a useful name.
Thread.currentThread().setName(THREAD_PREFIX + url.substring(serverUrl.length()));
}
if (debug) {
request = logAndReplaceRequest(request);
}
Object profilerObject = null;
if (profiler != null) {
profilerObject = profiler.beforeCall();
}
long start = System.nanoTime();
Response response = clientProvider.get().execute(request);
long elapsedTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
int statusCode = response.getStatus();
if (profiler != null) {
RequestInformation requestInfo = getRequestInfo(serverUrl, methodDetails, request);
profiler.afterCall(requestInfo, elapsedTime, statusCode, profilerObject);
}
if (debug) {
response = logAndReplaceResponse(url, response, elapsedTime);
}
Type type = methodDetails.responseObjectType;
if (statusCode >= 200 && statusCode < 300) { // 2XX == successful request
// Caller requested the raw Response object directly.
if (type.equals(Response.class)) {
// Read the entire stream and replace with one backed by a byte[]
response = Utils.readBodyToBytesIfNecessary(response);
if (methodDetails.isSynchronous) {
return response;
}
return new ResponseWrapper(response, response);
}
TypedInput body = response.getBody();
if (body == null) {
return new ResponseWrapper(response, null);
}
try {
Object convert = converter.fromBody(body, type);
if (methodDetails.isSynchronous) {
return convert;
}
return new ResponseWrapper(response, convert);
} catch (ConversionException e) {
// The response body was partially read by the converter. Replace it with null.
response = Utils.replaceResponseBody(response, null);
throw RetrofitError.conversionError(url, response, converter, type, e);
}
}
response = Utils.readBodyToBytesIfNecessary(response);
throw RetrofitError.httpError(url, response, converter, type);
} catch (RetrofitError e) {
throw e; // Pass through our own errors.
} catch (IOException e) {
throw RetrofitError.networkError(url, e);
} catch (Throwable t) {
throw RetrofitError.unexpectedError(url, t);
} finally {
if (!methodDetails.isSynchronous) {
Thread.currentThread().setName(IDLE_THREAD_NAME);
}
}
}
}
/** Log request headers and body. Consumes request body and returns identical replacement. */
private Request logAndReplaceRequest(Request request) throws IOException {
log.log(String.format("---> HTTP %s %s", request.getMethod(), request.getUrl()));
for (HeaderPair header : request.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedOutput body = request.getBody();
int bodySize = 0;
if (body != null) {
if (!request.getHeaders().isEmpty()) {
log.log("");
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
body.writeTo(baos);
byte[] bodyBytes = baos.toByteArray();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyString = new String(bodyBytes, Utils.parseCharset(bodyMime));
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
body = new TypedByteArray(bodyMime, bodyBytes);
}
log.log(String.format("---> END HTTP (%s-byte body)", bodySize));
// Since we consumed the original request, return a new, identical one from its bytes.
return new Request(request.getMethod(), request.getUrl(), request.getHeaders(), body);
}
/** Log response headers and body. Consumes response body and returns identical replacement. */
private Response logAndReplaceResponse(String url, Response response, long elapsedTime)
throws IOException {
log.log(String.format("<--- HTTP %s %s (%sms)", response.getStatus(), url, elapsedTime));
for (HeaderPair header : response.getHeaders()) {
log.log(header.getName() + ": " + header.getValue());
}
TypedInput body = response.getBody();
int bodySize = 0;
if (body != null) {
if (!response.getHeaders().isEmpty()) {
log.log("");
}
if (!(body instanceof TypedByteArray)) {
// Read the entire response body to we can log it and replace the original response
response = Utils.readBodyToBytesIfNecessary(response);
body = response.getBody();
}
byte[] bodyBytes = ((TypedByteArray) body).getBytes();
bodySize = bodyBytes.length;
String bodyMime = body.mimeType();
String bodyCharset = Utils.parseCharset(bodyMime);
String bodyString = new String(bodyBytes, bodyCharset);
for (int i = 0, len = bodyString.length(); i < len; i += LOG_CHUNK_SIZE) {
int end = Math.min(len, i + LOG_CHUNK_SIZE);
log.log(bodyString.substring(i, end));
}
}
log.log(String.format("<--- END HTTP (%s-byte body)", bodySize));
return response;
}
/**
* Capture the method, URL and body metadata of a request for profiler reporting.
* When the request carries no body, length defaults to 0 and content type to null.
*/
private static Profiler.RequestInformation getRequestInfo(String serverUrl,
RestMethodInfo methodDetails, Request request) {
TypedOutput payload = request.getBody();
long payloadLength = (payload == null) ? 0 : payload.length();
String payloadType = (payload == null) ? null : payload.mimeType();
return new Profiler.RequestInformation(methodDetails.requestMethod, serverUrl,
methodDetails.requestUrl, payloadLength, payloadType);
}
/**
* Build a new {@link RestAdapter}.
* <p>
* Calling the following methods is required before calling {@link #build()}:
* <ul>
* <li>{@link #setServer(Server)}</li>
* <li>{@link #setClient(Client.Provider)}</li>
* <li>{@link #setConverter(Converter)}</li>
* </ul>
* <p>
* If you are using asynchronous execution (i.e., with {@link Callback Callbacks}) the following
* is also required:
* <ul>
* <li>{@link #setExecutors(java.util.concurrent.Executor, java.util.concurrent.Executor)}</li>
* </ul>
*/
public static class Builder {
private Server server;
private Client.Provider clientProvider;
private Executor httpExecutor;
private Executor callbackExecutor;
<<<<<<< MINE
private HeaderPairs headers;
=======
private RequestHeaders requestHeaders;
>>>>>>> YOURS
private Converter converter;
private Profiler profiler;
private Log log;
private boolean debug;
public Builder setServer(String endpoint) {
if (endpoint == null) throw new NullPointerException("endpoint");
return setServer(new Server(endpoint));
}
public Builder setServer(Server server) {
if (server == null) throw new NullPointerException("server");
this.server = server;
return this;
}
public Builder setClient(final Client client) {
if (client == null) throw new NullPointerException("client");
return setClient(new Client.Provider() {
@Override public Client get() {
return client;
}
});
}
public Builder setClient(Client.Provider clientProvider) {
if (clientProvider == null) throw new NullPointerException("clientProvider");
this.clientProvider = clientProvider;
return this;
}
/**
* Executors used for asynchronous HTTP client downloads and callbacks.
*
* @param httpExecutor Executor on which HTTP client calls will be made.
* @param callbackExecutor Executor on which any {@link Callback} methods will be invoked. If
* this argument is {@code null} then callback methods will be run on the same thread as the
* HTTP client.
*/
public Builder setExecutors(Executor httpExecutor, Executor callbackExecutor) {
if (httpExecutor == null) throw new NullPointerException("httpExecutor");
if (callbackExecutor == null) callbackExecutor = new SynchronousExecutor();
this.httpExecutor = httpExecutor;
this.callbackExecutor = callbackExecutor;
return this;
}
<<<<<<< MINE
public Builder setHeaders(HeaderPairs headers) {
if (headers == null) throw new NullPointerException("headers");
this.headers = headers;
=======
public Builder setRequestHeaders(RequestHeaders requestHeaders) {
if (requestHeaders == null) throw new NullPointerException("requestHeaders");
this.requestHeaders = requestHeaders;
>>>>>>> YOURS
return this;
}
public Builder setConverter(Converter converter) {
if (converter == null) throw new NullPointerException("converter");
this.converter = converter;
return this;
}
public Builder setProfiler(Profiler profiler) {
if (profiler == null) throw new NullPointerException("profiler");
this.profiler = profiler;
return this;
}
public Builder setLog(Log log) {
if (log == null) throw new NullPointerException("log");
this.log = log;
return this;
}
public Builder setDebug(boolean debug) {
this.debug = debug;
return this;
}
public RestAdapter build() {
if (server == null) {
throw new IllegalArgumentException("Server may not be null.");
}
ensureSaneDefaults();
return new RestAdapter(server, clientProvider, httpExecutor, callbackExecutor, requestHeaders,
converter, profiler, log, debug);
}
private void ensureSaneDefaults() {
if (converter == null) {
converter = Platform.get().defaultConverter();
}
if (clientProvider == null) {
clientProvider = Platform.get().defaultClient();
}
if (httpExecutor == null) {
httpExecutor = Platform.get().defaultHttpExecutor();
}
if (callbackExecutor == null) {
callbackExecutor = Platform.get().defaultCallbackExecutor();
}
if (log == null) {
log = Platform.get().defaultLog();
}
<<<<<<< MINE
if (headers == null) {
headers = HeaderPairs.NONE;
=======
if (requestHeaders == null) {
requestHeaders = RequestHeaders.NONE;
>>>>>>> YOURS
}
}
}
}
Diff Result
No diff
Case 23 - roboguice.rev_bee33_b6d1a.ContextScope.java
public ContextScope(RoboApplication app)
Left modified body: now calls a new method passing constructor app parameter
Note: also removed the attribute RoboApplication app.
Right modified signature parameter type: RoboApplication → Application
Unstructured reported conflict between methods, and also included method void exit
Safe reported conflict between constructors
MergeMethods merged new signature with new body
Possible false negative (FN), as the attribute is now not being initialized.
KeepBothMethods kept both versions of constructor
void exit
Left modified body and removed method annotation
Right removed method annotation
Unstructured did not match the methods, although they were part of another conflict
Safe reported conflict between methods
MergeMethods reported conflict between methods
KeepBothMethods reported conflict between methods
Base
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.application.RoboApplication;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
* Guice {@link Scope} that confines bindings to the current Android {@link Context}.
* Scoped instances are held in a per-thread map that exists between {@link #enter} and
* {@link #exit}.
*
* @author Mike Burton
*/
public class ContextScope implements Scope {
// Per-thread map of scoped instances; non-null only while a scope is active on the thread.
protected final ThreadLocal<Map<Key<Context>, Object>> values = new ThreadLocal<Map<Key<Context>, Object>>();
// Injectors whose view injection is deferred until injectViews() is called.
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
// Application used to seed a fresh scope map when none is active (see initialScopedObjectMap).
protected RoboApplication app;
public ContextScope( RoboApplication app ) {
this.app = app;
}
/**
* Scopes can be entered multiple times with no problems (eg. from
* onCreate(), onStart(), etc). However, once they're closed, all their
* previous values are gone forever until the scope is reinitialized again
* via enter().
*/
public void enter(Context context) {
Map<Key<Context>,Object> map = values.get();
if( map==null ) {
map = new HashMap<Key<Context>,Object>();
values.set(map);
}
map.put(Key.get(Context.class), context);
}
// Drops every value scoped to the current thread; the parameter is intentionally unused.
@SuppressWarnings({"UnusedParameters"})
public void exit(Context ignored) {
values.remove();
}
// Queue an injector so its view fields are injected later by injectViews().
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
// Drain the queue in reverse order, performing the deferred view injection.
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i) {
viewsForInjection.remove(i).reallyInjectMembers();
}
}
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
@SuppressWarnings({"SuspiciousMethodCalls", "unchecked"})
public T get() {
final Map<Key<Context>, Object> scopedObjects = getScopedObjectMap(key);
@SuppressWarnings("unchecked")
T current = (T) scopedObjects.get(key);
// containsKey distinguishes a cached null from a missing entry, so null results
// returned by the unscoped provider are cached too.
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put((Key<Context>) key, current);
}
return current;
}
};
}
@SuppressWarnings({"UnusedParameters"})
protected <T> Map<Key<Context>, Object> getScopedObjectMap(Key<T> key) {
final Map<Key<Context>,Object> map = values.get();
// NOTE(review): the fallback map is not stored back into 'values', so each call made
// outside an active scope gets a brand-new map — confirm this is intended.
return map!=null ? map : initialScopedObjectMap();
}
// Fresh map pre-seeded with the application as the Context binding.
protected Map<Key<Context>,Object> initialScopedObjectMap() {
final HashMap<Key<Context>,Object> map = new HashMap<Key<Context>,Object>();
map.put(Key.get(Context.class),app);
return map;
}
}
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.application.RoboApplication;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author Mike Burton
*/
// Guice Scope implementation that confines bindings to the current Android Context.
// Scoped instances live in a per-thread map between enter() and exit().
public class ContextScope implements Scope {
// Per-thread storage of scoped instances; set on enter(), cleared on exit().
protected final ThreadLocal<Map<Key<Context>, Object>> values = new ThreadLocal<Map<Key<Context>, Object>>();
// Injectors with view injection deferred until injectViews().
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
// Application seeded into a fresh scope map when no scope is active on the thread.
protected RoboApplication app;
public ContextScope( RoboApplication app ) {
this.app = app;
}
/**
* Scopes can be entered multiple times with no problems (eg. from
* onCreate(), onStart(), etc). However, once they're closed, all their
* previous values are gone forever until the scope is reinitialized again
* via enter().
*/
public void enter(Context context) {
Map<Key<Context>,Object> map = values.get();
if( map==null ) {
map = new HashMap<Key<Context>,Object>();
values.set(map);
}
map.put(Key.get(Context.class), context);
}
// Clears all values scoped to the current thread; the argument is unused by design.
@SuppressWarnings({"UnusedParameters"})
public void exit(Context ignored) {
values.remove();
}
// Register an injector for later processing by injectViews().
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
// Perform all deferred view injections, draining the queue from the back.
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i) {
viewsForInjection.remove(i).reallyInjectMembers();
}
}
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
@SuppressWarnings({"SuspiciousMethodCalls", "unchecked"})
public T get() {
final Map<Key<Context>, Object> scopedObjects = getScopedObjectMap(key);
@SuppressWarnings("unchecked")
T current = (T) scopedObjects.get(key);
// The containsKey check lets a cached null satisfy the lookup without re-invoking
// the unscoped provider.
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put((Key<Context>) key, current);
}
return current;
}
};
}
@SuppressWarnings({"UnusedParameters"})
protected <T> Map<Key<Context>, Object> getScopedObjectMap(Key<T> key) {
final Map<Key<Context>,Object> map = values.get();
// NOTE(review): when no scope is active the freshly seeded map is not stored in
// 'values', so successive calls each build a new map — verify this is intended.
return map!=null ? map : initialScopedObjectMap();
}
// Builds a new map seeded with the application as the Context binding.
protected Map<Key<Context>,Object> initialScopedObjectMap() {
final HashMap<Key<Context>,Object> map = new HashMap<Key<Context>,Object>();
map.put(Key.get(Context.class),app);
return map;
}
}
Left
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.application.RoboApplication;
import roboguice.util.Ln;
import roboguice.util.Strings;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
// Scoped instances per Context; WeakHashMap lets a dead Context's entry be collected.
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
// Per-thread stack of active Contexts; the top is the Context injections resolve against.
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
// View injectors queued until injectViews() flushes them.
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
// Preference injectors queued until injectPreferenceViews() flushes them.
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
public ContextScope(RoboApplication app) {
// The application itself is the bottom-most Context on the creating thread's stack.
enter(app);
}
/** Queues a view injector to run later via injectViews(). */
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
/** Queues a preference injector to run later via injectPreferenceViews(). */
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
/** Flushes queued view injectors. Backwards iteration keeps remove(i) O(1)-safe w.r.t. indices. */
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
/** Flushes queued preference injectors, same pattern as injectViews(). */
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
/**
* Makes {@code context} the active Context for the current thread (pushing it on the
* stack, or moving it to the top if already present) and seeds it into its own scope map.
*/
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
// Local copy guards against a concurrent reassignment of the field mid-log.
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
/** Removes {@code context} from the current thread's stack; its scoped values stay in {@code values} until dispose(). */
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
}
/** Eagerly frees all scoped instances associated with {@code context}. */
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
/**
* Guice Scope contract: each key resolves to at most one instance per active Context.
*/
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
@SuppressWarnings("unchecked")
T current = (T) scopedObjects.get(key);
// containsKey guard lets an explicitly seeded null stay null instead of re-resolving.
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
// Lazily initializes this thread's Context stack.
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
}
/**
* Returns (creating on demand) the scoped-object map for the Context at the top of the
* current thread's stack. NOTE(review): peek() may return null if all referents were
* collected; a null key in the WeakHashMap then acts as a shared fallback bucket — confirm.
*/
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack<T> {
// Doubly-linked node wrapping a weak reference to the stored value.
static class Node<T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
// Empty stack: the new node is both head and tail.
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
// Not present: prepend a new node as the head.
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
// Present: splice it out of its position and relink it as the head.
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
// Referent was collected: prune the node and keep scanning.
node = disposeOfNode(node);
} else {
// Rotate: move the current head to the tail (no-op for a single-node stack).
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
// Prune collected entries as they are encountered.
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* NOTE(review): if the value is absent, findNode returns null and disposeOfNode
* dereferences it — this throws NullPointerException; a null guard seems missing.
* @param value
*/
public void remove(T value) {
Node<T> node = findNode(value);
disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.application.RoboApplication;
import roboguice.util.Ln;
import roboguice.util.Strings;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
public ContextScope(RoboApplication app) {
enter(app);
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
@SuppressWarnings("unchecked")
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack<T> {
static class Node<T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
Node<T> node = findNode(value);
disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
Right
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author Mike Burton
*/
public class ContextScope implements Scope {
// Per-thread cache of scoped instances, keyed by Guice Key. Lazily created on enter().
protected final ThreadLocal<Map<Key<Context>, Object>> values = new ThreadLocal<Map<Key<Context>, Object>>();
// View injectors queued until injectViews() flushes them.
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
// Fallback Context seed used when no scope has been entered on the current thread.
// (This Right-side snapshot widened the type from RoboApplication to android.app.Application.)
protected Application app;
public ContextScope( Application app ) {
this.app = app;
}
/**
* Scopes can be entered multiple times with no problems (eg. from
* onCreate(), onStart(), etc). However, once they're closed, all their
* previous values are gone forever until the scope is reinitialized again
* via enter().
*/
public void enter(Context context) {
Map<Key<Context>,Object> map = values.get();
if( map==null ) {
map = new HashMap<Key<Context>,Object>();
values.set(map);
}
// Seed (or replace) the active Context binding for this thread.
map.put(Key.get(Context.class), context);
}
/** Drops every scoped value for the current thread; the parameter is intentionally unused. */
public void exit(Context ignored) {
values.remove();
}
/** Queues a view injector to run later, once the view hierarchy exists (see injectViews()). */
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
/** Flushes the queued view injectors. Iterates backwards so remove(i) never shifts unvisited elements. */
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i) {
viewsForInjection.remove(i).reallyInjectMembers();
}
}
/** Convenience provider for the current Context, falling back to the application when unseeded. */
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
/**
* @param <T> is only allowed to be Context
*/
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
final Map<Key<Context>,Object> map = values.get();
// NOTE(review): the fallback map is never stored in the ThreadLocal, so instances
// cached into it are discarded after this call — confirm best-effort is intended.
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
T current = (T) scopedObjects.get(key);
// containsKey guard lets an explicitly seeded null stay null instead of re-resolving.
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put((Key<Context>) key, current);
}
return current;
}
};
}
/** Builds a one-off map pre-seeded with the application as the Context binding. */
protected Map<Key<Context>,Object> initialScopedObjectMap() {
final HashMap<Key<Context>,Object> map = new HashMap<Key<Context>,Object>();
map.put(Key.get(Context.class),app);
return map;
}
}
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected final ThreadLocal<Map<Key<Context>, Object>> values = new ThreadLocal<Map<Key<Context>, Object>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected Application app;
public ContextScope( Application app ) {
this.app = app;
}
/**
* Scopes can be entered multiple times with no problems (eg. from
* onCreate(), onStart(), etc). However, once they're closed, all their
* previous values are gone forever until the scope is reinitialized again
* via enter().
*/
public void enter(Context context) {
Map<Key<Context>,Object> map = values.get();
if( map==null ) {
map = new HashMap<Key<Context>,Object>();
values.set(map);
}
map.put(Key.get(Context.class), context);
}
public void exit(Context ignored) {
values.remove();
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i) {
viewsForInjection.remove(i).reallyInjectMembers();
}
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
/**
* @param <T> is only allowed to be Context
*/
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put((Key<Context>) key, current);
}
return current;
}
};
}
protected Map<Key<Context>,Object> initialScopedObjectMap() {
final HashMap<Key<Context>,Object> map = new HashMap<Key<Context>,Object>();
map.put(Key.get(Context.class),app);
return map;
}
}
MergeMethods
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.util.Ln;
import roboguice.util.Strings;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
<<<<<<< MINE
=======
protected Application app;
>>>>>>> YOURS
public ContextScope( Application app ) {
enter(app);
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
<<<<<<< MINE
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
=======
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
/**
 * Returns a Provider for the currently scoped {@code Context}, falling back
 * to the application when nothing narrower has been seeded under
 * {@code Key.get(Context.class)}.
 */
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
/** Lazily installs this thread's context stack on first use. */
protected void ensureContextStack() {
    final WeakActiveStack<Context> existing = contextStack.get();
    if (existing == null)
        contextStack.set(new WeakActiveStack<Context>());
}
/**
 * Returns the scoped-object map for the currently active context, creating
 * and registering an empty one on first access for that context.
 *
 * @param key unused; retained for signature compatibility with callers
 * @return the (possibly freshly created) map of scoped objects
 */
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
    final Context current = contextStack.get().peek();
    final Map<Key<?>, Object> existing = values.get(current);
    if (existing != null)
        return existing;
    final Map<Key<?>, Object> created = Maps.newHashMap();
    values.put(current, created);
    return created;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack <T> {
static class Node <T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
    // Fix: findNode returns null when the value is absent (or its weak
    // reference was collected); disposeOfNode(null) would throw NPE.
    final Node<T> node = findNode(value);
    if (node != null)
        disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
// Detach from the head first; for a single-node stack this also nulls tail,
// so the tail branch below is never reached with a null previous.
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
// Splice the node out of the doubly linked chain.
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
// Node was the tail (and not the sole element): retreat the tail pointer.
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.util.Ln;
import roboguice.util.Strings;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
<<<<<<< MINE
=======
protected Application app;
>>>>>>> YOURS
public ContextScope( Application app ) {
enter(app);
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
<<<<<<< MINE
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
=======
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack <T> {
static class Node <T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
    // Fix: findNode returns null when the value is absent (or its weak
    // reference was collected); disposeOfNode(null) would throw NPE.
    final Node<T> node = findNode(value);
    if (node != null)
        disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
KeepBothMethods
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.util.Ln;
import roboguice.util.Strings;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
<<<<<<< MINE
=======
protected Application app;
>>>>>>> YOURS
public ContextScope(RoboApplication app) {
enter(app);
}
public ContextScope( Application app ) {
this.app = app;
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
<<<<<<< MINE
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
=======
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack <T> {
static class Node <T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
    // Fix: findNode returns null when the value is absent (or its weak
    // reference was collected); disposeOfNode(null) would throw NPE.
    final Node<T> node = findNode(value);
    if (node != null)
        disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.util.Ln;
import roboguice.util.Strings;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
<<<<<<< MINE
=======
protected Application app;
>>>>>>> YOURS
public ContextScope(RoboApplication app) {
enter(app);
}
public ContextScope( Application app ) {
this.app = app;
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
<<<<<<< MINE
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
=======
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
/** Lazily installs a fresh context stack for the calling thread. */
protected void ensureContextStack() {
    if (contextStack.get() != null)
        return;
    contextStack.set(new WeakActiveStack<Context>());
}
/**
 * Returns (lazily creating) the scoped-object map for the Context currently
 * at the top of this thread's stack.
 *
 * NOTE(review): the {@code key} parameter is never read by this body;
 * presumably it is kept for call-site symmetry -- confirm before removing.
 * Also note contextStack.get() is dereferenced without a null check, so
 * callers must have invoked ensureContextStack()/enter() first.
 */
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
 * A circular, stack-like structure of weak references.
 *
 * push() will not add a new item if it already exists in the stack; it simply
 * brings the existing item to the top. Likewise pop() does not discard the top
 * item: it rotates it to the bottom and makes the next item the top, giving a
 * circular linked-list effect. To remove an item explicitly, call remove().
 *
 * Values are held via {@link WeakReference}; nodes whose referent has been
 * collected are pruned lazily as they are encountered during traversal. It is
 * therefore possible to get null back even when the stack appeared non-empty.
 *
 * NOTE(review): not synchronized; instances appear to be thread-confined via
 * the enclosing ThreadLocal -- confirm before sharing across threads.
 *
 * @param <T> the element type
 */
public static class WeakActiveStack<T> {

    /** Doubly-linked node holding a weak reference to its value. */
    static class Node<T> {
        Node<T> previous;
        Node<T> next;
        WeakReference<T> value;

        public Node(T value) {
            this.value = new WeakReference<T>(value);
        }
    }

    private Node<T> head;
    private Node<T> tail;

    /**
     * Pushes the value onto the top of the stack. If the value already
     * exists in the stack it is simply brought to the top.
     *
     * @param value the value to push (or promote)
     */
    public void push(T value) {
        if (head == null) {
            head = new Node<T>(value);
            tail = head;
        } else {
            Node<T> existingNode = findNode(value);
            if (existingNode == null) {
                // New value: link a fresh node in front of the current head.
                Node<T> newNode = new Node<T>(value);
                newNode.next = head;
                head.previous = newNode;
                head = newNode;
            } else {
                if (existingNode == head) return;
                // Unlink the existing node from its current position...
                if (existingNode == tail) {
                    tail = existingNode.previous;
                    tail.next = null;
                }
                if (existingNode.previous != null) {
                    existingNode.previous.next = existingNode.next;
                }
                if (existingNode.next != null) {
                    existingNode.next.previous = existingNode.previous;
                }
                // ...and splice it in as the new head.
                existingNode.next = head;
                head.previous = existingNode;
                head = existingNode;
                head.previous = null;
            }
        }
    }

    /**
     * Returns the value at the top of the stack, then rotates that node to
     * the bottom; the stack is never emptied by popping. Collected (null)
     * referents encountered on the way are pruned.
     *
     * @return the value that was at the top, or null if nothing live remains
     */
    public T pop() {
        WeakActiveStack.Node<T> node = head;
        while (node != null) {
            final T value = node.value.get();
            if (value == null) {
                node = disposeOfNode(node);
            } else {
                // Rotate: only needed when there is more than one node.
                if (node.next != null) {
                    head = node.next;
                    node.previous = tail;
                    tail.next = node;
                    node.next = null;
                    head.previous = null;
                    tail = node;
                }
                return value;
            }
        }
        return null;
    }

    /**
     * Non-destructive read of the item at the top of the stack.
     *
     * @return the first non-collected referent, or null if none is available
     */
    public T peek() {
        Node<T> node = head;
        while (node != null) {
            final T value = node.value.get();
            if (value == null) {
                node = disposeOfNode(node);
            } else {
                return value;
            }
        }
        return null;
    }

    /**
     * Removes the item from the stack, if present.
     *
     * Fix: the previous implementation passed the result of findNode()
     * straight to disposeOfNode(), throwing NullPointerException whenever
     * the value was absent (or its referent had already been collected).
     *
     * @param value the value to remove; a miss is now a harmless no-op
     */
    public void remove(T value) {
        Node<T> node = findNode(value);
        if (node != null) {
            disposeOfNode(node);
        }
    }

    /**
     * Unlinks a node, keeping head/tail and neighbor links consistent.
     *
     * @param node the node to dispose (must be non-null)
     * @return the next node in the stack, or null at the end
     */
    protected Node<T> disposeOfNode(Node<T> node) {
        if (node == head) {
            head = node.next;
            if (head == null) {
                tail = null;  // stack is now empty
            } else {
                head.previous = null;
            }
        }
        if (node.previous != null) {
            node.previous.next = node.next;
        }
        if (node.next != null) {
            node.next.previous = node.previous;
        }
        if (node == tail) {
            tail = node.previous;
            tail.next = null;
        }
        return node.next;
    }

    /**
     * Finds the node holding {@code value}, pruning collected nodes as it
     * iterates the stack.
     *
     * @param value the value to look for (matched with equals())
     * @return the node if found, or null
     */
    protected Node<T> findNode(T value) {
        Node<T> node = head;
        while (node != null) {
            final T nodeValue = node.value.get();
            if (nodeValue == null) {
                node = disposeOfNode(node);
            } else if (nodeValue.equals(value)) {
                return node;
            } else {
                node = node.next;
            }
        }
        return null;
    }
}
}
Safe
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.util.Ln;
import roboguice.util.Strings;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
<<<<<<< MINE
=======
protected Application app;
>>>>>>> YOURS
<<<<<<< MINE
public ContextScope( Application app ) {
this.app = app;
=======
public ContextScope(RoboApplication app) {
enter(app);
>>>>>>> YOURS
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
<<<<<<< MINE
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
=======
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack <T> {
static class Node <T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
Node<T> node = findNode(value);
disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import roboguice.util.Ln;
import roboguice.util.Strings;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
<<<<<<< MINE
=======
protected Application app;
>>>>>>> YOURS
<<<<<<< MINE
public ContextScope( Application app ) {
this.app = app;
=======
public ContextScope(RoboApplication app) {
enter(app);
>>>>>>> YOURS
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
<<<<<<< MINE
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
=======
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push while not add any new items to stack if the item already exists,
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack accessed. For performance they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack <T> {
static class Node <T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
Node<T> node = findNode(value);
disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
Unstructured
/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
<<<<<<< MINE
import roboguice.application.RoboApplication;
import roboguice.util.Ln;
import roboguice.util.Strings;
=======
>>>>>>> YOURS
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
<<<<<<< MINE
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
=======
protected Application app;
>>>>>>> YOURS
<<<<<<< MINE
public ContextScope(RoboApplication app) {
enter(app);
=======
public ContextScope( Application app ) {
this.app = app;
}
/**
* Scopes can be entered multiple times with no problems (eg. from
* onCreate(), onStart(), etc). However, once they're closed, all their
* previous values are gone forever until the scope is reinitialized again
* via enter().
*/
public void enter(Context context) {
Map<Key<Context>,Object> map = values.get();
if( map==null ) {
map = new HashMap<Key<Context>,Object>();
values.set(map);
}
map.put(Key.get(Context.class), context);
}
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
/**
* @param <T> is only allowed to be Context
*/
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
<<<<<<< MINE
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
=======
>>>>>>> YOURS
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push will not add any new items to the stack if the item already exists;
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack is accessed. For performance, they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack<T> {
static class Node<T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
Node<T> node = findNode(value);
disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}/*
* Copyright 2009 Michael Burton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*/
package roboguice.inject;
/**
* Scopes a single execution of a block of code. Apply this scope with a
* try/finally block: <pre> {@code
*
* scope.enter();
* try {
* // explicitly seed some seed objects...
* scope.seed(Key.get(SomeObject.class), someObject);
* // create and access scoped objects
* } finally {
* scope.exit();
* }
* }</pre>
*
* The scope can be initialized with one or more seed values by calling
* <code>seed(key, value)</code> before the injector will be called upon to
* provide for this key. A typical use is for a servlet filter to enter/exit the
* scope, representing a Request Scope, and seed HttpServletRequest and
* HttpServletResponse. For each key inserted with seed(), it's good practice
* (since you have to provide <i>some</i> binding anyhow) to include a
* corresponding binding that will throw an exception if Guice is asked to
* provide for that key if it was not yet seeded: <pre> {@code
*
* bind(key)
* .toProvider(ContextScope.<KeyClass>seededKeyProvider())
* .in(ScopeAnnotation.class);
* }</pre>
*
* @author Jesse Wilson
* @author Fedor Karpelevitch
*
*
* From http://code.google.com/p/google-guice/wiki/CustomScopes
*/
<<<<<<< MINE
import roboguice.application.RoboApplication;
import roboguice.util.Ln;
import roboguice.util.Strings;
=======
>>>>>>> YOURS
import android.app.Application;
import android.content.Context;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.Scope;
import com.google.inject.internal.Maps;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Map;
import java.util.WeakHashMap;
/**
* @author Mike Burton
*/
public class ContextScope implements Scope {
protected WeakHashMap<Context,Map<Key<?>, Object>> values = new WeakHashMap<Context,Map<Key<?>, Object>>();
protected ThreadLocal<WeakActiveStack<Context>> contextStack = new ThreadLocal<WeakActiveStack<Context>>();
protected ArrayList<ViewMembersInjector<?>> viewsForInjection = new ArrayList<ViewMembersInjector<?>>();
<<<<<<< MINE
protected ArrayList<PreferenceMembersInjector<?>> preferencesForInjection = new ArrayList<PreferenceMembersInjector<?>>();
=======
protected Application app;
>>>>>>> YOURS
<<<<<<< MINE
public ContextScope(RoboApplication app) {
enter(app);
=======
public ContextScope( Application app ) {
this.app = app;
}
/**
* Scopes can be entered multiple times with no problems (eg. from
* onCreate(), onStart(), etc). However, once they're closed, all their
* previous values are gone forever until the scope is reinitialized again
* via enter().
*/
public void enter(Context context) {
Map<Key<Context>,Object> map = values.get();
if( map==null ) {
map = new HashMap<Key<Context>,Object>();
values.set(map);
}
map.put(Key.get(Context.class), context);
}
public void exit(Context ignored) {
values.remove();
>>>>>>> YOURS
}
public void registerViewForInjection(ViewMembersInjector<?> injector) {
viewsForInjection.add(injector);
}
public void registerPreferenceForInjection(PreferenceMembersInjector<?> injector) {
preferencesForInjection.add(injector);
}
public void injectViews() {
for (int i = viewsForInjection.size() - 1; i >= 0; --i)
viewsForInjection.remove(i).reallyInjectMembers();
}
public void injectPreferenceViews() {
for (int i = preferencesForInjection.size() - 1; i >= 0; --i)
preferencesForInjection.remove(i).reallyInjectMembers();
}
public void enter(Context context) {
ensureContextStack();
contextStack.get().push(context);
final Key<Context> key = Key.get(Context.class);
getScopedObjectMap(key).put(key, context);
if( Ln.isVerboseEnabled() ) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null )
Ln.v("Contexts in the %s scope map after inserting %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public void exit(Context context) {
ensureContextStack();
contextStack.get().remove(context);
}
public void dispose(Context context) {
final WeakHashMap<Context,Map<Key<?>,Object>> map = values;
if( map!=null ) {
final Map<Key<?>,Object> scopedObjects = map.remove(context);
if( scopedObjects!=null )
scopedObjects.clear();
if( Ln.isVerboseEnabled() )
Ln.v("Contexts in the %s scope map after removing %s: %s", Thread.currentThread().getName(), context, Strings.join( ", ", map.keySet()));
}
}
public Provider<Context> scope() {
return scope(Key.get(Context.class), new Provider<Context>() {
public Context get() {
return app;
}
});
}
/**
* @param <T> is only allowed to be Context
*/
@SuppressWarnings({"SuspiciousMethodCalls","unchecked"})
public <T> Provider<T> scope(final Key<T> key, final Provider<T> unscoped) {
return new Provider<T>() {
public T get() {
<<<<<<< MINE
Map<Key<?>, Object> scopedObjects = getScopedObjectMap(key);
=======
final Map<Key<Context>,Object> map = values.get();
final Map<Key<Context>, Object> scopedObjects = map != null ? map : initialScopedObjectMap();
>>>>>>> YOURS
T current = (T) scopedObjects.get(key);
if (current == null && !scopedObjects.containsKey(key)) {
current = unscoped.get();
scopedObjects.put(key, current);
}
return current;
}
};
<<<<<<< MINE
}
protected void ensureContextStack() {
if (contextStack.get() == null) {
contextStack.set(new WeakActiveStack<Context>());
}
=======
>>>>>>> YOURS
}
protected <T> Map<Key<?>, Object> getScopedObjectMap(Key<T> key) {
final Context context = contextStack.get().peek();
Map<Key<?>,Object> scopedObjects = values.get(context);
if (scopedObjects == null) {
scopedObjects = Maps.newHashMap();
values.put(context, scopedObjects);
}
return scopedObjects;
}
/**
* A circular stack like structure of weak references.
* Calls to push will not add any new items to the stack if the item already exists;
* it will simply bring the item to the top of the stack.
*
* Likewise, pop will not remove the item from the stack, it will simply make the next item
* the top, move the current top to the bottom. Thus creating a circular linked list type effect.
*
* To remove an item explicitly call the remove method.
*
* The stack also holds WeakReferences of T, these references will automatically be removed
* anytime the stack is accessed. For performance, they are only removed as they are encountered.
*
* So it is possible to get a null value back, even though you thought the stack had items in it.
* @param <T>
*/
public static class WeakActiveStack<T> {
static class Node<T> {
Node<T> previous;
Node<T> next;
WeakReference<T> value;
public Node(T value) {
this.value = new WeakReference<T>(value);
}
}
private Node<T> head;
private Node<T> tail;
/**
* Pushes the value onto the top of the stack.
* If the value exists in the stack it is simply brought to the top.
* @param value
*/
public void push(T value) {
if (head == null) {
head = new Node<T>(value);
tail = head;
} else {
Node<T> existingNode = findNode(value);
if (existingNode == null) {
Node<T> newNode = new Node<T>(value);
newNode.next = head;
head.previous = newNode;
head = newNode;
} else {
if (existingNode == head) return;
if (existingNode == tail) {
tail = existingNode.previous;
tail.next= null;
}
if (existingNode.previous != null) {
existingNode.previous.next = existingNode.next;
}
if (existingNode.next != null) {
existingNode.next.previous = existingNode.previous;
}
existingNode.next = head;
head.previous = existingNode;
head = existingNode;
head.previous = null;
}
}
}
/**
* Pops the first item off the stack, then moves it to the bottom.
* Popping is an infinite operation that will never end, it just keeps moving the top item to the bottom.
* Popping will also dispose of items whose weak references have been collected.
* @return The value of the item at the top of the stack.
*/
public T pop() {
WeakActiveStack.Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
if (node.next != null) {
head = node.next;
node.previous = tail;
tail.next = node;
node.next = null;
head.previous = null;
tail = node;
}
return value;
}
}
return null;
}
/**
* Non destructive read of the item at the top of stack.
* @return the first non collected referent held, or null if nothing is available.
*/
public T peek() {
Node<T> node = head;
while (node != null) {
final T value = node.value.get();
if (value == null) {
node = disposeOfNode(node);
} else {
return value;
}
}
return null;
}
/**
* Removes the item from the stack.
* @param value
*/
public void remove(T value) {
Node<T> node = findNode(value);
disposeOfNode(node);
}
/**
* Removes a node ensuring all links are properly updated.
* @param node
* @return The next node in the stack.
*/
protected Node<T> disposeOfNode(Node<T> node) {
if (node == head) {
head = node.next;
if (head == null) {
tail = null;
} else {
head.previous = null;
}
}
if (node.previous != null) {
node.previous.next = node.next;
}
if (node.next != null) {
node.next.previous = node.previous;
}
if (node == tail) {
tail = node.previous;
tail.next = null;
}
return node.next;
}
/**
* Finds a node given a value
* Will dispose of nodes if needed as it iterates the stack.
* @param value
* @return The node if found or null
*/
protected Node<T> findNode(T value) {
Node<T> node = head;
while (node != null) {
final T nodeValue = node.value.get();
if (nodeValue == null) {
node = disposeOfNode(node);
} else if (nodeValue.equals(value)) {
return node;
} else {
node = node.next;
}
}
return null;
}
}
}
Diff Result
No diff
Case 24 - roboguice.rev_bee33_b6d1a.EventManager.java
void registerObserver(Context context, Object instance, Method method, Class event)
Left
modified signature to make use of generics. Type: Class → Class<T>
added overloaded versions of method
modified body to call added overloaded version
Right modified body
Unstructured reported a conflict between closely located versions of the method
Safe reported a conflict between one version and an empty region; kept all other versions.
MergeMethods reported no conflict
KeepBothMethods kept both versions of the method. FN: the result does not compile, as the two versions have the same erasure.
void unregisterObserver(Context context, Object instance, Class event)
Very similar to registerObserver. Difference: MergeMethods reported conflict on body
Base
package roboguice.event;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<ObserverReference<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<ObserverReference<?>>>>();
public boolean isEnabled() {
return true;
}
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if (!isEnabled()) return;
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public void unregisterObserver(Context context, Object instance, Class event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<ObserverReference<?>> iterator = observers.iterator(); iterator.hasNext();) {
ObserverReference observer = iterator.next();
if (observer != null) {
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*/
public void clear( Context context ) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context
* @param event
*/
public void fire(Context context, Object event) {
if (!isEnabled()) return;
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (ObserverReference observer : observers) {
try {
observer.invoke(event,null);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
public static class ObserverReference<ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
}
package roboguice.event;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<ObserverReference<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<ObserverReference<?>>>>();
public boolean isEnabled() {
return true;
}
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if (!isEnabled()) return;
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public void unregisterObserver(Context context, Object instance, Class event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<ObserverReference<?>> iterator = observers.iterator(); iterator.hasNext();) {
ObserverReference observer = iterator.next();
if (observer != null) {
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*/
public void clear( Context context ) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context
* @param event
*/
public void fire(Context context, Object event) {
if (!isEnabled()) return;
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (ObserverReference observer : observers) {
try {
observer.invoke(event,null);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
public static class ObserverReference<ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
}
Left
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
public boolean isEnabled() {
return true;
}
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(), event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
 * Unregister all methods observing the given event from the provided context.
 *
 * Scans the event's listener set and removes every {@link ObserverMethodListener}
 * whose (weakly held) target instance is identical to {@code instance}.
 * Does nothing when this manager is disabled or no registrations exist.
 *
 * @param context  associated with event
 * @param instance object whose observer methods are to be unregistered
 * @param event    observed event class
 * @param <T>      event type
 */
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
    if (!isEnabled()) return;
    final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
    if (methods == null) return;
    final Set<EventListener<?>> observers = methods.get(event);
    if (observers == null) return;
    for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
        final EventListener listener = iterator.next();
        if (listener instanceof ObserverMethodListener) {
            final ObserverMethodListener observer = (ObserverMethodListener) listener;
            final Object registeredInstance = observer.instanceReference.get();
            if (registeredInstance == instance) {
                // BUG FIX: the previous implementation broke out of the loop after the
                // first removal, leaving any additional observer methods registered by the
                // same instance for this event still active — contradicting the documented
                // "unregister ALL methods" contract. Keep scanning to remove every match.
                iterator.remove();
            }
        }
    }
}
/**
 * Clears all observers registered on the given context and drops the context's
 * registration entry entirely.
 *
 * @param context whose registrations are discarded
 */
public void clear( Context context ) {
    // remove() hands back the mapping (or null), collapsing the original get/check/remove.
    final Map<Class<?>, Set<EventListener<?>>> removed = registrations.remove(context);
    if (removed != null) {
        removed.clear();
    }
}
/**
 * Raises the event on the given context: every listener registered for the event's
 * concrete class is invoked with the event object. Does nothing when this manager is
 * disabled or no listeners are registered.
 *
 * NOTE(review): listeners are invoked while iterating the live registration set, so a
 * listener that (un)registers observers for this same event during dispatch would
 * trigger a ConcurrentModificationException — confirm callers never do this.
 *
 * @param context associated with event
 * @param event   event instance to deliver
 */
public void fire(Context context, Object event) {
    if (!isEnabled()) return;

    final Map<Class<?>, Set<EventListener<?>>> eventsToListeners = registrations.get(context);
    if (eventsToListeners == null) return;

    // Dispatch keys on the event's exact runtime class (no superclass lookup).
    final Set<EventListener<?>> listeners = eventsToListeners.get(event.getClass());
    if (listeners == null) return;

    for (final EventListener listener : listeners) {
        listener.onEvent(event);
    }
}
// No-op variant of EventManager: isEnabled() reports false, which short-circuits the
// unregister/fire paths that consult it before touching the registration map.
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
/**
 * EventListener adapter that reflectively invokes a single observer method on a target
 * instance. The target is held via WeakReference so a registration does not keep its
 * owner alive.
 */
public static class ObserverMethodListener<T> implements EventListener<T> {
// Method name + JVM descriptor string; the stable component of equals()/hashCode().
protected String descriptor;
protected Method method;
// Weak so the registration map never leaks the observing instance.
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
// Allow invoking non-public observer methods.
method.setAccessible(true);
}
// Invokes the wrapped method with the event; a collected target is logged, not an error.
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
// Target was garbage-collected but the registration is still present.
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
// The observer method itself threw: log and keep going.
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
// NOTE(review): equality depends on the weak referent, so two listeners compare
// differently after the target is collected — confirm this is acceptable for
// HashSet membership in the registration map.
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
// Mirrors equals(): descriptor plus the (possibly already collected) target instance.
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
public boolean isEnabled() {
return true;
}
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(), event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Unregister all methods observing the given event from the provided context.
*
* @param context associated with event
* @param instance to be unregistered
* @param event observed
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
public static class ObserverMethodListener<T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
Right
package roboguice.event;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<ObserverReference<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<ObserverReference<?>>>>();
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<ObserverReference<?>> iterator = observers.iterator(); iterator.hasNext();) {
ObserverReference observer = iterator.next();
if (observer != null) {
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*/
public void clear( Context context ) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context
* @param event
*/
public void fire(Context context, Object event) {
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (ObserverReference observer : observers) {
try {
observer.invoke(event,null);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference<ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
}
package roboguice.event;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<ObserverReference<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<ObserverReference<?>>>>();
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<ObserverReference<?>> iterator = observers.iterator(); iterator.hasNext();) {
ObserverReference observer = iterator.next();
if (observer != null) {
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*/
public void clear( Context context ) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context
* @param event
*/
public void fire(Context context, Object event) {
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (ObserverReference observer : observers) {
try {
observer.invoke(event,null);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference<ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
}
MergeMethods
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
=======
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference <ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
>>>>>>> YOURS
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class ObserverMethodListener <T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
=======
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference <ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
>>>>>>> YOURS
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class ObserverMethodListener <T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
KeepBothMethods
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
=======
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference <ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
>>>>>>> YOURS
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<ObserverReference<?>> iterator = observers.iterator(); iterator.hasNext();) {
ObserverReference observer = iterator.next();
if (observer != null) {
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Unregister all methods observing the given event from the provided context.
*
* @param context associated with event
* @param instance to be unregistered
* @param event observed
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class ObserverMethodListener <T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
=======
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference <ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
>>>>>>> YOURS
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Registers given method with provided context and event.
*/
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
/**
* Unregisters all methods observing the given event from the provided context.
*/
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<ObserverReference<?>> iterator = observers.iterator(); iterator.hasNext();) {
ObserverReference observer = iterator.next();
if (observer != null) {
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Unregister all methods observing the given event from the provided context.
*
* @param context associated with event
* @param instance to be unregistered
* @param event observed
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class ObserverMethodListener <T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
Safe
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
=======
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference <ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
>>>>>>> YOURS
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Registers given method with provided context and event.
*/
<<<<<<< MINE
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
=======
>>>>>>> YOURS
/**
* Unregisters all methods observing the given event from the provided context.
*/
<<<<<<< MINE
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
=======
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class ObserverMethodListener <T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
=======
>>>>>>> YOURS
<<<<<<< MINE
=======
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* A result handler can be provided to deal with the return values from the invoked observer methods.
*
* @param context
* @param event
*/
/*
// Disabled for now until we can figure out best way to proceed
public <ResultType> ResultType notifyWithResult(Context context, Object event, ResultType defaultValue ) {
if (!isEnabled()) return defaultValue;
if( event.getClass().getAnnotation(Returns.class)==null )
throw new RuntimeException("You must use fire with events that do not expect return values");
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) return defaultValue;
final Set<ObserverReference<?>> observers = methods.get(event.getClass());
if (observers == null) return defaultValue;
for (ObserverReference<?> o : observers) {
final ObserverReference<ResultType> observer = (ObserverReference<ResultType>) o;
try {
return observer.invoke( event, defaultValue);
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
return defaultValue;
}
*/
public static class ObserverReference <ResultType> {
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverReference(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
method.setAccessible(true);
}
public ResultType invoke(Object event, ResultType defaultValue ) throws InvocationTargetException, IllegalAccessException {
final Object instance = instanceReference.get();
return instance == null ? defaultValue : (ResultType) method.invoke(instance, event);
}
}
>>>>>>> YOURS
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Registers given method with provided context and event.
*/
<<<<<<< MINE
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
}
=======
>>>>>>> YOURS
/**
* Unregisters all methods observing the given event from the provided context.
*/
<<<<<<< MINE
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
=======
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(),event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
public static class ObserverMethodListener <T> implements EventListener<T> {
protected String descriptor;
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
Unstructured
package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
<<<<<<< MINE
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
=======
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
>>>>>>> YOURS
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
<<<<<<< MINE
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(), event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Unregister all methods observing the given event from the provided context.
*
* @param context associated with event
* @param instance to be unregistered
* @param event observed
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
public static class ObserverMethodListener<T> implements EventListener<T> {
protected String descriptor;
=======
public static class ObserverReference<ResultType> {
>>>>>>> YOURS
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}package roboguice.event;
import roboguice.event.javaassist.RuntimeSupport;
import roboguice.util.Ln;
import android.app.Application;
import android.content.Context;
import com.google.inject.Inject;
import com.google.inject.Provider;
import com.google.inject.Singleton;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.*;
/**
* Manager class handling the following:
*
* Registration of event observing methods:
* registerObserver()
* unregisterObserver()
* clear()
* Raising Events:
* fire()
* notifyWithResult()
*
* @author Adam Tybor
* @author John Ericksen
*/
@SuppressWarnings({"unchecked"})
@Singleton
public class EventManager {
@Inject protected Provider<Context> contextProvider;
protected Map<Context, Map<Class<?>, Set<EventListener<?>>>> registrations = new WeakHashMap<Context, Map<Class<?>, Set<EventListener<?>>>>();
/**
* Register an observer EventListener with the current context (provided).
*
* @param event to observe
* @param listener to be triggered
* @param <T> event type
*/
<<<<<<< MINE
public <T> void registerObserver( Class<T> event, EventListener listener ) {
registerObserver(contextProvider.get(),event,listener);
=======
public void registerObserver(Context context, Object instance, Method method, Class event) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<ObserverReference<?>>>();
registrations.put(context, methods);
}
Set<ObserverReference<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<ObserverReference<?>>();
methods.put(event, observers);
}
/*
final Returns returns = (Returns) event.getAnnotation(Returns.class);
if( returns!=null ) {
if( !returns.value().isAssignableFrom(method.getReturnType()) )
throw new RuntimeException( String.format("Method %s.%s does not return a value that is assignable to %s",method.getDeclaringClass().getName(),method.getName(),returns.value().getName()) );
if( !observers.isEmpty() ) {
final ObserverReference observer = observers.iterator().next();
throw new RuntimeException( String.format("Only one observer allowed for event types that return a value annotation. Previously registered observer is %s.%s", observer.method.getDeclaringClass().getName(), observer.method.getName()));
}
}
*/
observers.add(new ObserverReference(instance, method));
>>>>>>> YOURS
}
/**
* Register a method observer with the current context (provided).
*
* @param instance to be called
* @param method to be called
* @param event observed
* @param <T> event type
*/
<<<<<<< MINE
public <T> void registerObserver(Object instance, Method method, Class<T> event) {
registerObserver(contextProvider.get(),instance,method,event);
}
/**
* Unregister the given EventListener with the current context (provided).
*
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Class<T> event, EventListener<T> listener ) {
unregisterObserver(contextProvider.get(),event,listener);
}
/**
* Unregister the given event from the current context (provided).
*
* @param instance to be unregistered
* @param event observed
* @param <T> event type
*/
public <T> void unregisterObserver(Object instance, Class<T> event) {
unregisterObserver(contextProvider.get(),instance,event);
}
/**
* Raises the event's class' event on the current context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param event observed
*/
public void fire( Object event ) {
fire(contextProvider.get(), event);
}
/**
* Register the given EventListener to the contest and event class.
*
* @param context associated with event
* @param event observed
* @param listener to be triggered
* @param <T> event type
*/
public <T> void registerObserver( Context context, Class<T> event, EventListener listener ) {
if( context instanceof Application )
throw new RuntimeException("You may not register event handlers on the Application context");
Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) {
methods = new HashMap<Class<?>, Set<EventListener<?>>>();
registrations.put(context, methods);
}
Set<EventListener<?>> observers = methods.get(event);
if (observers == null) {
observers = new HashSet<EventListener<?>>();
methods.put(event, observers);
}
observers.add(listener);
}
/**
* Registers given method with provided context and event.
*
* @param context associated with event
* @param instance to be called
* @param method to be called
* @param event observed
*/
public <T> void registerObserver(Context context, Object instance, Method method, Class<T> event) {
registerObserver(context, event, new ObserverMethodListener<T>(instance, method));
}
/**
* Unregisters the provided event listener from the given event
*
* @param context associated with event
* @param event observed
* @param listener to be unregistered
* @param <T> event type
*/
public <T> void unregisterObserver(Context context, Class<T> event, EventListener<T> listener ) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
public void unregisterObserver(Context context, Object instance, Class event) {
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener registeredListener = iterator.next();
if (registeredListener == listener) {
iterator.remove();
break;
}
}
}
/**
* Unregister all methods observing the given event from the provided context.
*
* @param context associated with event
* @param instance to be unregistered
* @param event observed
*/
public <T> void unregisterObserver(Context context, Object instance, Class<T> event) {
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event);
if (observers == null) return;
for (Iterator<EventListener<?>> iterator = observers.iterator(); iterator.hasNext();) {
final EventListener listener = iterator.next();
if( listener instanceof ObserverMethodListener ) {
final ObserverMethodListener observer = ((ObserverMethodListener)listener);
final Object registeredInstance = observer.instanceReference.get();
if (registeredInstance == instance) {
iterator.remove();
break;
}
}
}
}
/**
* Clears all observers of the given context.
*
* @param context associated with event
*/
public void clear( Context context ) {
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
if (methods == null) return;
registrations.remove(context);
methods.clear();
}
/**
* Raises the event's class' event on the given context. This event object is passed (if configured) to the
* registered observer's method.
*
* @param context associated with event
* @param event observed
*/
public void fire(Context context, Object event) {
<<<<<<< MINE
if (!isEnabled()) return;
final Map<Class<?>, Set<EventListener<?>>> methods = registrations.get(context);
=======
/*
if( event.getClass().getAnnotation(Returns.class)!=null )
throw new RuntimeException("You must use notifyWithResult for events that expect return values");
*/
final Map<Class<?>, Set<ObserverReference<?>>> methods = registrations.get(context);
>>>>>>> YOURS
if (methods == null) return;
final Set<EventListener<?>> observers = methods.get(event.getClass());
if (observers == null) return;
for (EventListener observer : observers)
observer.onEvent(event);
}
<<<<<<< MINE
public static class NullEventManager extends EventManager {
@Override
public boolean isEnabled() {
return false;
}
}
public static class ObserverMethodListener<T> implements EventListener<T> {
protected String descriptor;
=======
public static class ObserverReference<ResultType> {
>>>>>>> YOURS
protected Method method;
protected WeakReference<Object> instanceReference;
public ObserverMethodListener(Object instance, Method method) {
this.instanceReference = new WeakReference<Object>(instance);
this.method = method;
this.descriptor = method.getName() + ':' + RuntimeSupport.makeDescriptor(method);
method.setAccessible(true);
}
public void onEvent(T event) {
try {
final Object instance = instanceReference.get();
if (instance != null) {
method.invoke(instance, event);
} else {
Ln.w("trying to observe event %1$s on disposed context, consider explicitly calling EventManager.unregisterObserver", method.getName());
}
} catch (InvocationTargetException e) {
Ln.e(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final ObserverMethodListener that = (ObserverMethodListener) o;
if (descriptor != null ? !descriptor.equals(that.descriptor) : that.descriptor != null) return false;
final Object thisInstance = instanceReference.get();
final Object thatInstance = that.instanceReference.get();
return !(thisInstance != null ? !thisInstance.equals(thatInstance) : thatInstance != null);
}
@Override
public int hashCode() {
int result = descriptor != null ? descriptor.hashCode() : 0;
final Object thisInstance = instanceReference.get();
result = 31 * result + (thisInstance != null ? thisInstance.hashCode() : 0);
return result;
}
}
}
Diff Result
No diff
Case 25 - voldemort.rev_6efd9_d55c7.ForceFailStore.java
public ForceFailStore(Store&lt;K, V&gt; innerStore)
Left modified body and added overloaded constructors
Right modified signature parameter type: Store&lt;K, V&gt; → Store&lt;K, V, T&gt;
Note: Class was also modified from "ForceFailStore&lt;K, V&gt; extends DelegatingStore&lt;K, V&gt;" to "ForceFailStore&lt;K, V, T&gt; extends DelegatingStore&lt;K, V, T&gt;"
Unstructured reported conflict between closest versions
Safe reported conflict between closest versions
MergeMethods merged most similar versions
Note: FN was introduced, as now some constructors use Store&lt;K, V, T&gt; and others use Store&lt;K, V&gt; and no conflict was reported
KeepBothMethods kept all versions of constructor
Note: FN was introduced, as now some constructors use Store&lt;K, V, T&gt; and others use Store&lt;K, V&gt; and no conflict was reported
Base
package voldemort.store;
public class ForceFailStore<K, V> extends DelegatingStore<K, V> {
public ForceFailStore(Store<K, V> innerStore) {
super(innerStore);
}
}
package voldemort.store;
public class ForceFailStore<K, V> extends DelegatingStore<K, V> {
public ForceFailStore(Store<K, V> innerStore) {
super(innerStore);
}
}
Left
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V> extends DelegatingStore<K, V> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if(fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().get(key);
}
}
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V> extends DelegatingStore<K, V> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if(fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().get(key);
}
}
Right
package voldemort.store;
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
}
}
package voldemort.store;
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
}
}
MergeMethods
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V, T> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if (fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().get(key);
}
}
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V, T> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if (fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().get(key);
}
}
KeepBothMethods
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if (fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().get(key);
}
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
}
}
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if (fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if (fail)
throw e;
return getInnerStore().get(key);
}
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
}
}
Safe
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore <K, V, T> extends DelegatingStore<K, V, T> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
<<<<<<< MINE
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
=======
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
>>>>>>> YOURS
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if(fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().get(key);
}
}
package voldemort.store;
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore <K, V, T> extends DelegatingStore<K, V, T> {
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
<<<<<<< MINE
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
=======
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
>>>>>>> YOURS
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if(fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().get(key);
}
}
Unstructured
package voldemort.store;
<<<<<<< MINE
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V> extends DelegatingStore<K, V> {
=======
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
>>>>>>> YOURS
<<<<<<< MINE
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
=======
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
>>>>>>> YOURS
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if(fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().get(key);
}
}package voldemort.store;
<<<<<<< MINE
import voldemort.VoldemortException;
import voldemort.versioning.Version;
import voldemort.versioning.Versioned;
import java.util.List;
import java.util.Map;
public class ForceFailStore<K, V> extends DelegatingStore<K, V> {
=======
public class ForceFailStore<K, V, T> extends DelegatingStore<K, V, T> {
>>>>>>> YOURS
<<<<<<< MINE
private final VoldemortException e;
private final Object identifier;
private volatile boolean fail = false;
public ForceFailStore(Store<K, V> innerStore) {
this(innerStore, new VoldemortException("Operation failed!"));
=======
public ForceFailStore(Store<K, V, T> innerStore) {
super(innerStore);
>>>>>>> YOURS
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e) {
this(innerStore, e, "unknown");
}
public ForceFailStore(Store<K, V> innerStore, VoldemortException e, Object identifier) {
super(innerStore);
this.e = e;
this.identifier = identifier;
}
public void setFail(boolean fail) {
this.fail = fail;
}
public Object getIdentifier() {
return identifier;
}
@Override
public void put(K key, Versioned<V> value) throws VoldemortException {
if(fail)
throw e;
getInnerStore().put(key, value);
}
@Override
public boolean delete(K key, Version version) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().delete(key, version);
}
@Override
public Map<K, List<Versioned<V>>> getAll(Iterable<K> keys) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().getAll(keys);
}
@Override
public List<Versioned<V>> get(K key) throws VoldemortException {
if(fail)
throw e;
return getInnerStore().get(key);
}
}